diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index d39b8bd..d9bdb66 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -13191,7 +13191,11 @@ void buildPTFReduceSinkDetails(PartitionedTableFunctionDef tabDef,
     List<PTFExpressionDef> partColList = tabDef.getPartition().getExpressions();
 
     for (PTFExpressionDef colDef : partColList) {
-      ExprNodeDesc exprNode = colDef.getExprNode();
+      final ExprNodeDesc exprNode = colDef.getExprNode();
+      if (ExprNodeDescUtils.isConstant(exprNode)) {
+        // ignore partitioning by constants
+        continue;
+      }
       if (ExprNodeDescUtils.indexOf(exprNode, partCols) < 0) {
         partCols.add(exprNode);
         orderCols.add(exprNode);
@@ -13209,10 +13213,15 @@ void buildPTFReduceSinkDetails(PartitionedTableFunctionDef tabDef,
      */
     List<OrderExpressionDef> orderColList = tabDef.getOrder().getExpressions();
     for (int i = 0; i < orderColList.size(); i++) {
-      OrderExpressionDef colDef = orderColList.get(i);
-      char orderChar = colDef.getOrder() == PTFInvocationSpec.Order.ASC ? '+' : '-';
-      char nullOrderChar = colDef.getNullOrder() == PTFInvocationSpec.NullOrder.NULLS_FIRST ? 'a' : 'z';
-      int index = ExprNodeDescUtils.indexOf(colDef.getExprNode(), orderCols);
+      final OrderExpressionDef colDef = orderColList.get(i);
+      final char orderChar = colDef.getOrder() == PTFInvocationSpec.Order.ASC ? '+' : '-';
+      final char nullOrderChar = colDef.getNullOrder() == PTFInvocationSpec.NullOrder.NULLS_FIRST ? 'a' : 'z';
+      final ExprNodeDesc exprNode = colDef.getExprNode();
+      if (ExprNodeDescUtils.isConstant(exprNode)) {
+        // ignore ordering by constants
+        continue;
+      }
+      int index = ExprNodeDescUtils.indexOf(exprNode, orderCols);
       if (index >= 0) {
         orderString.setCharAt(index, orderChar);
         nullOrderString.setCharAt(index, nullOrderChar);
@@ -13349,7 +13358,10 @@ private Operator genReduceSinkPlanForWindowing(WindowingSpec spec,
     StringBuilder nullOrder = new StringBuilder();
 
     for (PartitionExpression partCol : spec.getQueryPartitionSpec().getExpressions()) {
-      ExprNodeDesc partExpr = genExprNodeDesc(partCol.getExpression(), inputRR);
+      final ExprNodeDesc partExpr = genExprNodeDesc(partCol.getExpression(), inputRR);
+      if (ExprNodeDescUtils.isConstant(partExpr)) {
+        continue;
+      }
       if (ExprNodeDescUtils.indexOf(partExpr, partCols) < 0) {
         partCols.add(partExpr);
         orderCols.add(partExpr);
@@ -13360,7 +13372,10 @@ private Operator genReduceSinkPlanForWindowing(WindowingSpec spec,
 
     if (spec.getQueryOrderSpec() != null) {
       for (OrderExpression orderCol : spec.getQueryOrderSpec().getExpressions()) {
-        ExprNodeDesc orderExpr = genExprNodeDesc(orderCol.getExpression(), inputRR);
+        final ExprNodeDesc orderExpr = genExprNodeDesc(orderCol.getExpression(), inputRR);
+        if (ExprNodeDescUtils.isConstant(orderExpr)) {
+          continue;
+        }
         char orderChar = orderCol.getOrder() == PTFInvocationSpec.Order.ASC ? '+' : '-';
         char nullOrderChar = orderCol.getNullOrder() == PTFInvocationSpec.NullOrder.NULLS_FIRST ? 'a' : 'z';
         int index = ExprNodeDescUtils.indexOf(orderExpr, orderCols);
diff --git ql/src/test/queries/clientpositive/windowing_navfn.q ql/src/test/queries/clientpositive/windowing_navfn.q
index f2ec9fc..7c27e22 100644
--- ql/src/test/queries/clientpositive/windowing_navfn.q
+++ ql/src/test/queries/clientpositive/windowing_navfn.q
@@ -17,6 +17,8 @@ create table over10k(
 
 load data local inpath '../../data/files/over10k' into table over10k;
 
+explain select row_number() over() from src where key = '238';
+
 select row_number() over() from src where key = '238';
 
 select s, row_number() over (partition by d order by `dec`) from over10k limit 100;
diff --git ql/src/test/results/clientpositive/windowing_navfn.q.out ql/src/test/results/clientpositive/windowing_navfn.q.out
index 0976b54..3d783d0 100644
--- ql/src/test/results/clientpositive/windowing_navfn.q.out
+++ ql/src/test/results/clientpositive/windowing_navfn.q.out
@@ -44,6 +44,66 @@ POSTHOOK: query: load data local inpath '../../data/files/over10k' into table ov
 POSTHOOK: type: LOAD
 #### A masked pattern was here ####
 POSTHOOK: Output: default@over10k
+PREHOOK: query: explain select row_number() over() from src where key = '238'
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select row_number() over() from src where key = '238'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: src
+            Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+            Filter Operator
+              predicate: (key = '238') (type: boolean)
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              Reduce Output Operator
+                sort order:
+                Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+      Reduce Operator Tree:
+        PTF Operator
+          Function definitions:
+              Input definition
+                input alias: ptf_0
+                output shape:
+                type: WINDOWING
+              Windowing table definition
+                input alias: ptf_1
+                name: windowingtablefunction
+                order by: 0 ASC NULLS FIRST
+                partition by: 0
+                raw input shape:
+                window functions:
+                    window function definition
+                      alias: row_number_window_0
+                      name: row_number
+                      window function: GenericUDAFRowNumberEvaluator
+                      window frame: PRECEDING(MAX)~FOLLOWING(MAX)
+                      isPivotResult: true
+          Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+          Select Operator
+            expressions: row_number_window_0 (type: int)
+            outputColumnNames: _col0
+            Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+              table:
+                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: select row_number() over() from src where key = '238'
 PREHOOK: type: QUERY
 PREHOOK: Input: default@src
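
For illustration, a minimal, self-contained sketch of the filtering pattern the patched loops apply when collecting ReduceSink key columns: constant expressions (such as the literal 0 that shows up as "partition by: 0" / "order by: 0 ASC NULLS FIRST" in the explain output above for row_number() over()) are skipped via ExprNodeDescUtils.isConstant, and duplicates are avoided with ExprNodeDescUtils.indexOf, so the Reduce Output Operator in the plan ends up with no key columns at all (empty sort order). The driver class ConstantKeySkipSketch and the hand-built expressions are hypothetical; only ExprNodeDesc, ExprNodeColumnDesc, ExprNodeConstantDesc and the two ExprNodeDescUtils helpers come from the code the patch touches.

    import java.util.ArrayList;
    import java.util.List;

    import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    // Hypothetical driver class; not part of the patch.
    public class ConstantKeySkipSketch {
      public static void main(String[] args) {
        // Candidate partition/order expressions: a constant literal 0
        // (what "over()" windowing partitions and orders by) and an
        // ordinary column reference.
        List<ExprNodeDesc> candidates = new ArrayList<ExprNodeDesc>();
        candidates.add(new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 0));
        candidates.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "key", "src", false));

        // Same pattern as the patched loops: skip constants, avoid duplicates.
        List<ExprNodeDesc> keyCols = new ArrayList<ExprNodeDesc>();
        for (ExprNodeDesc expr : candidates) {
          if (ExprNodeDescUtils.isConstant(expr)) {
            continue; // constants contribute nothing to partitioning or ordering
          }
          if (ExprNodeDescUtils.indexOf(expr, keyCols) < 0) {
            keyCols.add(expr);
          }
        }
        // Only the "key" column survives as a ReduceSink key column.
        System.out.println(keyCols);
      }
    }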