diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 684dec3c25..2a1c50283f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -1347,8 +1347,13 @@ private static void checkColumnName(String columnName) throws SemanticException
       ASTNode child = (ASTNode) ast.getChild(i);
       int directionCode = DirectionUtils.tokenToCode(child.getToken().getType());
       child = (ASTNode) child.getChild(0);
+      if (child.getToken().getType() != HiveParser.TOK_NULLS_FIRST && directionCode == DirectionUtils.ASCENDING_CODE) {
+        throw new SemanticException(
+            "create/alter bucketed table: not supported NULLS LAST for SORTED BY in ASC order");
+      }
       if (child.getToken().getType() != HiveParser.TOK_NULLS_LAST && directionCode == DirectionUtils.DESCENDING_CODE) {
-        throw new SemanticException("create/alter table: not supported NULLS FIRST for ORDER BY in DESC order");
+        throw new SemanticException(
+            "create/alter bucketed table: not supported NULLS FIRST for SORTED BY in DESC order");
       }
       colList.add(new Order(unescapeIdentifier(child.getChild(0).getText()).toLowerCase(), directionCode));
     }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index caee02d2e3..d9ee40b42e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -2471,9 +2471,7 @@ columnNameOrder
 @init { pushMsg("column name order", state); }
 @after { popMsg(state); }
     : identifier orderSpec=orderSpecification? nullSpec=nullOrdering?
-    -> {$orderSpec.tree == null && $nullSpec.tree == null && nullsLast()}?
-            ^(TOK_TABSORTCOLNAMEASC ^(TOK_NULLS_LAST identifier))
-    -> {$orderSpec.tree == null && $nullSpec.tree == null && !nullsLast()}?
+    -> {$orderSpec.tree == null && $nullSpec.tree == null}?
             ^(TOK_TABSORTCOLNAMEASC ^(TOK_NULLS_FIRST identifier))
     -> {$orderSpec.tree == null}?
             ^(TOK_TABSORTCOLNAMEASC ^($nullSpec identifier))
diff --git ql/src/test/queries/clientpositive/nullshandling.q ql/src/test/queries/clientpositive/nullshandling.q
new file mode 100644
index 0000000000..ab2eb8b8e3
--- /dev/null
+++ ql/src/test/queries/clientpositive/nullshandling.q
@@ -0,0 +1,33 @@
+set hive.default.nulls.last=false;
+
+CREATE TABLE table1 (
+  k1 STRING,
+  f1 STRING,
+  sequence_num BIGINT,
+  create_bsk BIGINT,
+  change_bsk BIGINT,
+  op_code STRING )
+PARTITIONED BY (run_id BIGINT)
+CLUSTERED BY (k1) SORTED BY (k1, change_bsk, sequence_num) INTO 4 BUCKETS
+STORED AS ORC;
+
+DESCRIBE table1;
+
+DROP TABLE table1;
+
+set hive.default.nulls.last=true;
+
+CREATE TABLE table2 (
+  k1 STRING,
+  f1 STRING,
+  sequence_num BIGINT,
+  create_bsk BIGINT,
+  change_bsk BIGINT,
+  op_code STRING )
+PARTITIONED BY (run_id BIGINT)
+CLUSTERED BY (k1) SORTED BY (k1, change_bsk, sequence_num) INTO 4 BUCKETS
+STORED AS ORC;
+
+DESCRIBE table2;
+
+DROP TABLE table2;
\ No newline at end of file
diff --git ql/src/test/results/clientpositive/nullshandling.q.out ql/src/test/results/clientpositive/nullshandling.q.out
new file mode 100644
index 0000000000..8a941c1f09
--- /dev/null
+++ ql/src/test/results/clientpositive/nullshandling.q.out
@@ -0,0 +1,102 @@
+PREHOOK: query: CREATE TABLE table1 (
+  k1 STRING,
+  f1 STRING,
+  sequence_num BIGINT,
+  create_bsk BIGINT,
+  change_bsk BIGINT,
+  op_code STRING )
+PARTITIONED BY (run_id BIGINT)
+CLUSTERED BY (k1) SORTED BY (k1, change_bsk, sequence_num) INTO 4 BUCKETS
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table1
+POSTHOOK: query: CREATE TABLE table1 (
+  k1 STRING,
+  f1 STRING,
+  sequence_num BIGINT,
+  create_bsk BIGINT,
+  change_bsk BIGINT,
+  op_code STRING )
+PARTITIONED BY (run_id BIGINT)
+CLUSTERED BY (k1) SORTED BY (k1, change_bsk, sequence_num) INTO 4 BUCKETS
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table1
+PREHOOK: query: DESCRIBE table1
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table1
+POSTHOOK: query: DESCRIBE table1
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table1
+k1	string
+f1	string
+sequence_num	bigint
+create_bsk	bigint
+change_bsk	bigint
+op_code	string
+run_id	bigint
+
+# Partition Information
+# col_name	data_type	comment
+run_id	bigint
+PREHOOK: query: DROP TABLE table1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table1
+PREHOOK: Output: default@table1
+POSTHOOK: query: DROP TABLE table1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table1
+POSTHOOK: Output: default@table1
+PREHOOK: query: CREATE TABLE table2 (
+  k1 STRING,
+  f1 STRING,
+  sequence_num BIGINT,
+  create_bsk BIGINT,
+  change_bsk BIGINT,
+  op_code STRING )
+PARTITIONED BY (run_id BIGINT)
+CLUSTERED BY (k1) SORTED BY (k1, change_bsk, sequence_num) INTO 4 BUCKETS
+STORED AS ORC
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@table2
+POSTHOOK: query: CREATE TABLE table2 (
+  k1 STRING,
+  f1 STRING,
+  sequence_num BIGINT,
+  create_bsk BIGINT,
+  change_bsk BIGINT,
+  op_code STRING )
+PARTITIONED BY (run_id BIGINT)
+CLUSTERED BY (k1) SORTED BY (k1, change_bsk, sequence_num) INTO 4 BUCKETS
+STORED AS ORC
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@table2
+PREHOOK: query: DESCRIBE table2
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@table2
+POSTHOOK: query: DESCRIBE table2
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@table2
+k1	string
+f1	string
+sequence_num	bigint
+create_bsk	bigint
+change_bsk	bigint
+op_code	string
+run_id	bigint
+
+# Partition Information
+# col_name	data_type	comment
+run_id	bigint
+PREHOOK: query: DROP TABLE table2
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@table2
+PREHOOK: Output: default@table2
+POSTHOOK: query: DROP TABLE table2
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@table2
+POSTHOOK: Output: default@table2
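
Note (reviewer illustration, not part of the patch): a minimal HiveQL sketch of the behaviour the hunks above enforce, assuming a build with this change applied; the table names below are hypothetical and chosen only for the example.

-- Hypothetical example: with the grammar change, a bare SORTED BY column still
-- defaults to ASC NULLS FIRST even when hive.default.nulls.last=true, so this
-- DDL is accepted under either setting.
set hive.default.nulls.last=true;
CREATE TABLE bucketed_ok (k1 STRING, v1 BIGINT)
CLUSTERED BY (k1) SORTED BY (k1) INTO 4 BUCKETS
STORED AS ORC;

-- An explicit ASC NULLS LAST sort key is now rejected by the new analyzer check:
-- "create/alter bucketed table: not supported NULLS LAST for SORTED BY in ASC order"
-- CREATE TABLE bucketed_bad (k1 STRING, v1 BIGINT)
-- CLUSTERED BY (k1) SORTED BY (k1 ASC NULLS LAST) INTO 4 BUCKETS
-- STORED AS ORC;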