diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index b311bb5..a78f850 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -1610,6 +1610,7 @@ private RelNode copyNodeScan(RelNode scan) {
                 DruidRules.AGGREGATE_PROJECT,
                 DruidRules.PROJECT,
                 DruidRules.AGGREGATE,
+                DruidRules.POST_AGGREGATION_PROJECT,
                 DruidRules.FILTER_AGGREGATE_TRANSPOSE,
                 DruidRules.FILTER_PROJECT_TRANSPOSE,
                 DruidRules.SORT_PROJECT_TRANSPOSE,
diff --git ql/src/test/queries/clientpositive/druid_basic3.q ql/src/test/queries/clientpositive/druid_basic3.q
new file mode 100644
index 0000000..624beeb
--- /dev/null
+++ ql/src/test/queries/clientpositive/druid_basic3.q
@@ -0,0 +1,95 @@
+set hive.strict.checks.cartesian.product=false;
+set hive.druid.broker.address.default=localhost.test;
+
+CREATE EXTERNAL TABLE druid_table_1
+STORED BY 'org.apache.hadoop.hive.druid.QTestDruidStorageHandler'
+TBLPROPERTIES ("druid.datasource" = "wikipedia");
+
+EXPLAIN
+SELECT sum(added) + sum(delta) as a, language
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT sum(delta), sum(added) + sum(delta) AS a, language
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT language, sum(added) / sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT language, sum(added) * sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT language, sum(added) - sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT language, sum(added) + 100.0 AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT language, -1 * (a + b) AS c
+FROM (
+  SELECT (sum(added)-sum(delta)) / (count(*) * 3.0) AS a, sum(deleted) AS b, language
+  FROM druid_table_1
+  GROUP BY language) subq
+ORDER BY c DESC;
+
+EXPLAIN
+SELECT language, robot, sum(added) - sum(delta) AS a
+FROM druid_table_1
+WHERE extract (week from `__time`) IN (10,11)
+GROUP BY language, robot;
+
+EXPLAIN
+SELECT language, sum(delta) / count(*) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT language, sum(added) / sum(delta) AS a,
+       CASE WHEN sum(deleted)=0 THEN 1.0 ELSE sum(deleted) END AS b
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT language, a, a - b as c
+FROM (
+  SELECT language, sum(added) + 100 AS a, sum(delta) AS b
+  FROM druid_table_1
+  GROUP BY language) subq
+ORDER BY a DESC;
+
+EXPLAIN
+SELECT language, robot, "A"
+FROM (
+  SELECT sum(added) - sum(delta) AS a, language, robot
+  FROM druid_table_1
+  GROUP BY language, robot ) subq
+ORDER BY "A"
+LIMIT 5;
+
+EXPLAIN
+SELECT language, robot, "A"
+FROM (
+  SELECT language, sum(added) + sum(delta) AS a, robot
+  FROM druid_table_1
+  GROUP BY language, robot) subq
+ORDER BY robot, language
+LIMIT 5;
diff --git ql/src/test/results/clientpositive/druid_basic3.q.out ql/src/test/results/clientpositive/druid_basic3.q.out
new file mode 100644
index 0000000..c962799
--- /dev/null
+++ ql/src/test/results/clientpositive/druid_basic3.q.out
@@ -0,0 +1,565 @@
+PREHOOK: query: CREATE EXTERNAL TABLE druid_table_1
+STORED BY 'org.apache.hadoop.hive.druid.QTestDruidStorageHandler'
+TBLPROPERTIES ("druid.datasource" = "wikipedia")
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@druid_table_1
+POSTHOOK: query: CREATE EXTERNAL TABLE druid_table_1
+STORED BY 'org.apache.hadoop.hive.druid.QTestDruidStorageHandler'
+TBLPROPERTIES ("druid.datasource" = "wikipedia")
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@druid_table_1
+PREHOOK: query: EXPLAIN
+SELECT sum(added) + sum(delta) as a, language
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT sum(added) + sum(delta) as a, language
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_1
+          properties:
+            druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default","columns":[{"dimension":"postagg#0","direction":"descending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"delta"}],"postAggregations":[{"type":"arithmetic","name":"postagg#0","fn":"+","fields":[{"type":"fieldAccess","name":"","fieldName":"$f1"},{"type":"fieldAccess","name":"","fieldName":"$f2"}]}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+            druid.query.type groupBy
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          Select Operator
+            expressions: postagg#0 (type: float), language (type: string)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT sum(delta), sum(added) + sum(delta) AS a, language
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT sum(delta), sum(added) + sum(delta) AS a, language
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_1
+          properties:
+            druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default","columns":[{"dimension":"postagg#0","direction":"descending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"delta"},{"type":"doubleSum","name":"$f2","fieldName":"added"}],"postAggregations":[{"type":"arithmetic","name":"postagg#0","fn":"+","fields":[{"type":"fieldAccess","name":"","fieldName":"$f2"},{"type":"fieldAccess","name":"","fieldName":"$f1"}]}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+            druid.query.type groupBy
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          Select Operator
+            expressions: $f1 (type: float), postagg#0 (type: float), language (type: string)
+            outputColumnNames: _col0, _col1, _col2
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, sum(added) / sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, sum(added) / sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_1
+          properties:
+            druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default","columns":[{"dimension":"postagg#0","direction":"descending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"delta"}],"postAggregations":[{"type":"arithmetic","name":"postagg#0","fn":"quotient","fields":[{"type":"fieldAccess","name":"","fieldName":"$f1"},{"type":"fieldAccess","name":"","fieldName":"$f2"}]}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+            druid.query.type groupBy
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          Select Operator
+            expressions: language (type: string), postagg#0 (type: float)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, sum(added) * sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, sum(added) * sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_1
+          properties:
+            druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default","columns":[{"dimension":"postagg#0","direction":"descending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"delta"}],"postAggregations":[{"type":"arithmetic","name":"postagg#0","fn":"*","fields":[{"type":"fieldAccess","name":"","fieldName":"$f1"},{"type":"fieldAccess","name":"","fieldName":"$f2"}]}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+            druid.query.type groupBy
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          Select Operator
+            expressions: language (type: string), postagg#0 (type: float)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, sum(added) - sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, sum(added) - sum(delta) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        TableScan
+          alias: druid_table_1
+          properties:
+            druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default","columns":[{"dimension":"postagg#0","direction":"descending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"delta"}],"postAggregations":[{"type":"arithmetic","name":"postagg#0","fn":"-","fields":[{"type":"fieldAccess","name":"","fieldName":"$f1"},{"type":"fieldAccess","name":"","fieldName":"$f2"}]}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+            druid.query.type groupBy
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          Select Operator
+            expressions: language (type: string), postagg#0 (type: float)
+            outputColumnNames: _col0, _col1
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, sum(added) + 100.0 AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, sum(added) + 100.0 AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: druid_table_1
+            properties:
+              druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"added"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+              druid.query.type groupBy
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: language (type: string), ($f1 + 100.0) (type: double)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: double)
+                sort order: -
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                value expressions: _col0 (type: string)
+      Reduce Operator Tree:
+        Select Operator
+          expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: double)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, -1 * (a + b) AS c
+FROM (
+  SELECT (sum(added)-sum(delta)) / (count(*) * 3.0) AS a, sum(deleted) AS b, language
+  FROM druid_table_1
+  GROUP BY language) subq
+ORDER BY c DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, -1 * (a + b) AS c
+FROM (
+  SELECT (sum(added)-sum(delta)) / (count(*) * 3.0) AS a, sum(deleted) AS b, language
+  FROM druid_table_1
+  GROUP BY language) subq
+ORDER BY c DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: druid_table_1
+            properties:
+              druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"delta"},{"type":"count","name":"$f3"},{"type":"doubleSum","name":"$f4","fieldName":"deleted"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+              druid.query.type groupBy
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: language (type: string), (-1.0 * ((($f1 - $f2) / UDFToDouble((CAST( $f3 AS decimal(19,0)) * 3))) + $f4)) (type: double)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: double)
+                sort order: -
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                value expressions: _col0 (type: string)
+      Reduce Operator Tree:
+        Select Operator
+          expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: double)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, robot, sum(added) - sum(delta) AS a
+FROM druid_table_1
+WHERE extract (week from `__time`) IN (10,11)
+GROUP BY language, robot
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, robot, sum(added) - sum(delta) AS a
+FROM druid_table_1
+WHERE extract (week from `__time`) IN (10,11)
+GROUP BY language, robot
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: druid_table_1
+            properties:
+              druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"robot"},{"type":"default","dimension":"language"}],"limitSpec":{"type":"default"},"filter":{"type":"in","dimension":"__time","values":["10","11"],"extractionFn":{"type":"timeFormat","format":"w","timeZone":"US/Pacific","locale":"en-US"}},"aggregations":[{"type":"doubleSum","name":"$f2","fieldName":"added"},{"type":"doubleSum","name":"$f3","fieldName":"delta"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+              druid.query.type groupBy
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: language (type: string), robot (type: string), ($f2 - $f3) (type: float)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, sum(delta) / count(*) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, sum(delta) / count(*) AS a
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: druid_table_1
+            properties:
+              druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"delta"},{"type":"count","name":"$f2"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+              druid.query.type groupBy
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: language (type: string), ($f1 / UDFToDouble($f2)) (type: double)
+              outputColumnNames: _col0, _col1
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: double)
+                sort order: -
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                value expressions: _col0 (type: string)
+      Reduce Operator Tree:
+        Select Operator
+          expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: double)
+          outputColumnNames: _col0, _col1
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, sum(added) / sum(delta) AS a,
+       CASE WHEN sum(deleted)=0 THEN 1.0 ELSE sum(deleted) END AS b
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, sum(added) / sum(delta) AS a,
+       CASE WHEN sum(deleted)=0 THEN 1.0 ELSE sum(deleted) END AS b
+FROM druid_table_1
+GROUP BY language
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: druid_table_1
+            properties:
+              druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default","columns":[{"dimension":"postagg#0","direction":"descending","dimensionOrder":"numeric"}]},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"delta"},{"type":"doubleSum","name":"$f3","fieldName":"deleted"}],"postAggregations":[{"type":"arithmetic","name":"postagg#0","fn":"quotient","fields":[{"type":"fieldAccess","name":"","fieldName":"$f1"},{"type":"fieldAccess","name":"","fieldName":"$f2"}]}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+              druid.query.type groupBy
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: language (type: string), postagg#0 (type: float), CASE WHEN (($f3 = 0.0)) THEN (1) ELSE ($f3) END (type: float)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              File Output Operator
+                compressed: false
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, a, a - b as c
+FROM (
+  SELECT language, sum(added) + 100 AS a, sum(delta) AS b
+  FROM druid_table_1
+  GROUP BY language) subq
+ORDER BY a DESC
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, a, a - b as c
+FROM (
+  SELECT language, sum(added) + 100 AS a, sum(delta) AS b
+  FROM druid_table_1
+  GROUP BY language) subq
+ORDER BY a DESC
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: druid_table_1
+            properties:
+              druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"language"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"doubleSum","name":"$f1","fieldName":"added"},{"type":"doubleSum","name":"$f2","fieldName":"delta"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]}
+              druid.query.type groupBy
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            Select Operator
+              expressions: language (type: string), ($f1 + 100.0) (type: double), (($f1 + 100.0) - $f2) (type: double)
+              outputColumnNames: _col0, _col1, _col2
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+              Reduce Output Operator
+                key expressions: _col1 (type: double)
+                sort order: -
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+                value expressions: _col0 (type: string), _col2 (type: double)
+      Reduce Operator Tree:
+        Select Operator
+          expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: double), VALUE._col1 (type: double)
+          outputColumnNames: _col0, _col1, _col2
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+          File Output Operator
+            compressed: false
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE
+            table:
+                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: EXPLAIN
+SELECT language, robot, "A"
+FROM (
+  SELECT sum(added) - sum(delta) AS a, language, robot
+  FROM druid_table_1
+  GROUP BY language, robot ) subq
+ORDER BY "A"
+LIMIT 5
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT language, robot, "A"
+FROM (
+  SELECT sum(added) - sum(delta) AS a, language, robot
+  FROM druid_table_1
+  GROUP BY language, robot ) subq
+ORDER BY "A"
+LIMIT 5
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Fetch Operator
+      limit: 5
+      Processor Tree:
+        TableScan
+          alias: druid_table_1
+          properties:
{"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"robot"},{"type":"default","dimension":"language"}],"limitSpec":{"type":"default"},"aggregations":[{"type":"longSum","name":"dummy_agg","fieldName":"dummy_agg"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]} + druid.query.type groupBy + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: language (type: string), robot (type: string), 'A' (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + ListSink + +PREHOOK: query: EXPLAIN +SELECT language, robot, "A" +FROM ( + SELECT language, sum(added) + sum(delta) AS a, robot + FROM druid_table_1 + GROUP BY language, robot) subq +ORDER BY robot, language +LIMIT 5 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT language, robot, "A" +FROM ( + SELECT language, sum(added) + sum(delta) AS a, robot + FROM druid_table_1 + GROUP BY language, robot) subq +ORDER BY robot, language +LIMIT 5 +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + TableScan + alias: druid_table_1 + properties: + druid.query.json {"queryType":"groupBy","dataSource":"wikipedia","granularity":"all","dimensions":[{"type":"default","dimension":"robot"},{"type":"default","dimension":"language"}],"limitSpec":{"type":"default","limit":5,"columns":[{"dimension":"robot","direction":"ascending","dimensionOrder":"alphanumeric"},{"dimension":"language","direction":"ascending","dimensionOrder":"alphanumeric"}]},"aggregations":[{"type":"longSum","name":"dummy_agg","fieldName":"dummy_agg"}],"intervals":["1900-01-01T00:00:00.000/3000-01-01T00:00:00.000"]} + druid.query.type groupBy + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + Select Operator + expressions: language (type: string), robot (type: string), 'A' (type: string) + outputColumnNames: _col0, _col1, _col2 + Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column stats: NONE + ListSink +