diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 3e8e1b3981..cb5720036f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -765,10 +765,11 @@ private static void constraintInfosToCheckConstraints(String catName, String dat
   }
 
   protected static void processDefaultConstraints(String catName, String databaseName, String tableName,
-      ASTNode child, List<String> columnNames, List<SQLDefaultConstraint> defaultConstraints, final ASTNode typeChild)
+      ASTNode child, List<String> columnNames, List<SQLDefaultConstraint> defaultConstraints, final ASTNode typeChild,
+      final TokenRewriteStream tokenRewriteStream)
           throws SemanticException {
     List<ConstraintInfo> defaultInfos = new ArrayList<>();
-    generateConstraintInfos(child, columnNames, defaultInfos, typeChild, null);
+    generateConstraintInfos(child, columnNames, defaultInfos, typeChild, tokenRewriteStream);
     constraintInfosToDefaultConstraints(catName, databaseName, tableName, defaultInfos, defaultConstraints);
   }
 
@@ -930,7 +931,8 @@ private static String getCheckExpression(ASTNode checkExprAST, final TokenRewrit
    * @return retrieve the default value and return it as string
    * @throws SemanticException
    */
-  private static String getDefaultValue(ASTNode defaultValueAST, ASTNode typeChild) throws SemanticException{
+  private static String getDefaultValue(ASTNode defaultValueAST, ASTNode typeChild,
+      final TokenRewriteStream tokenStream) throws SemanticException{
     // first create expression from defaultValueAST
     TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
     ExprNodeDesc defaultValExpr = TypeCheckProcFactory
@@ -942,7 +944,8 @@ private static String getDefaultValue(ASTNode defaultValueAST, ASTNode typeChild
     }
 
     //get default value to be be stored in metastore
-    String defaultValueText = defaultValExpr.getExprString();
+    String defaultValueText = tokenStream.toOriginalString(defaultValueAST.getTokenStartIndex(),
+        defaultValueAST.getTokenStopIndex());
     final int DEFAULT_MAX_LEN = 255;
     if(defaultValueText.length() > DEFAULT_MAX_LEN) {
       throw new SemanticException(
@@ -1026,7 +1029,7 @@ private static void generateConstraintInfos(ASTNode child, List<String> columnNa
        rely = false;
      } else if( child.getToken().getType() == HiveParser.TOK_DEFAULT_VALUE){
        // try to get default value only if this is DEFAULT constraint
-       checkOrDefaultValue = getDefaultValue(grandChild, typeChildForDefault);
+       checkOrDefaultValue = getDefaultValue(grandChild, typeChildForDefault, tokenRewriteStream);
      } else if(child.getToken().getType() == HiveParser.TOK_CHECK_CONSTRAINT) {
        checkOrDefaultValue = getCheckExpression(grandChild, tokenRewriteStream);
      }
@@ -1259,7 +1262,7 @@ private static void checkColumnName(String columnName) throws SemanticException
         break;
       case HiveParser.TOK_DEFAULT_VALUE:
         processDefaultConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,
-            ImmutableList.of(col.getName()), defaultConstraints, typeChild);
+            ImmutableList.of(col.getName()), defaultConstraints, typeChild, tokenRewriteStream);
         break;
       case HiveParser.TOK_NOT_NULL:
         processNotNullConstraints(catName, qualifiedTabName[0], qualifiedTabName[1], constraintChild,