diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index c966392d97..3a6ff75b81 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -657,6 +657,7 @@ minillaplocal.query.files=\
   materialized_view_rewrite_8.q,\
   materialized_view_rewrite_9.q,\
   materialized_view_rewrite_10.q,\
+  materialized_view_rewrite_in_between.q,\
   materialized_view_rewrite_no_join_opt.q,\
   materialized_view_rewrite_no_join_opt_2.q,\
   materialized_view_rewrite_part_1.q,\
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveInBetweenExpandRule.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveInBetweenExpandRule.java
new file mode 100644
index 0000000000..b08756e80e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveInBetweenExpandRule.java
@@ -0,0 +1,194 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.optimizer.calcite.rules;
+
+import java.util.ArrayList;
+import java.util.List;
+import org.apache.calcite.plan.RelOptRule;
+import org.apache.calcite.plan.RelOptRuleCall;
+import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.rel.core.Filter;
+import org.apache.calcite.rel.core.Join;
+import org.apache.calcite.rel.core.Project;
+import org.apache.calcite.rex.RexBuilder;
+import org.apache.calcite.rex.RexCall;
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.rex.RexShuttle;
+import org.apache.calcite.rex.RexUtil;
+import org.apache.calcite.sql.fun.SqlStdOperatorTable;
+import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelFactories;
+import org.apache.hadoop.hive.ql.optimizer.calcite.translator.RexNodeConverter;
+
+/**
+ * This class contains rules to rewrite IN/BETWEEN clauses into their
+ * corresponding AND/OR versions.
+ * It is the counterpart to {@link HivePointLookupOptimizerRule}.
+ */
+public class HiveInBetweenExpandRule {
+
+  public static final FilterRule FILTER_INSTANCE = new FilterRule();
+  public static final JoinRule JOIN_INSTANCE = new JoinRule();
+  public static final ProjectRule PROJECT_INSTANCE = new ProjectRule();
+
+  /** Rule adapter to apply the transformation to Filter conditions. */
+  private static class FilterRule extends RelOptRule {
+
+    public FilterRule () {
+      super(operand(Filter.class, any()), HiveRelFactories.HIVE_BUILDER, null);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      final Filter filter = call.rel(0);
+      RexInBetweenExpander expander = new RexInBetweenExpander(
+          filter.getCluster().getRexBuilder());
+      RexNode condition = expander.apply(filter.getCondition());
+
+      if (!expander.modified) {
+        return;
+      }
+
+      RelNode newFilter = filter.copy(filter.getTraitSet(),
+          filter.getInput(), condition);
+
+      call.transformTo(newFilter);
+    }
+  }
+
+  /** Rule adapter to apply the transformation to Join conditions. */
+  public static class JoinRule extends RelOptRule {
+
+    public JoinRule () {
+      super(operand(Join.class, any()), HiveRelFactories.HIVE_BUILDER, null);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      final Join join = call.rel(0);
+      RexInBetweenExpander expander = new RexInBetweenExpander(
+          join.getCluster().getRexBuilder());
+      RexNode condition = expander.apply(join.getCondition());
+
+      if (!expander.modified) {
+        return;
+      }
+
+      RelNode newJoin = join.copy(join.getTraitSet(),
+          condition,
+          join.getLeft(),
+          join.getRight(),
+          join.getJoinType(),
+          join.isSemiJoinDone());
+
+      call.transformTo(newJoin);
+    }
+  }
+
+  /** Rule adapter to apply the transformation to Project expressions. */
+  public static class ProjectRule extends RelOptRule {
+
+    public ProjectRule () {
+      super(operand(Project.class, any()), HiveRelFactories.HIVE_BUILDER, null);
+    }
+
+    @Override
+    public void onMatch(RelOptRuleCall call) {
+      final Project project = call.rel(0);
+      RexInBetweenExpander expander = new RexInBetweenExpander(
+          project.getCluster().getRexBuilder());
+      List<RexNode> newProjects = new ArrayList<>();
+      for (RexNode expr : project.getProjects()) {
+        newProjects.add(expander.apply(expr));
+      }
+
+      if (!expander.modified) {
+        return;
+      }
+
+      Project newProject = project.copy(project.getTraitSet(),
+          project.getInput(), newProjects, project.getRowType());
+
+      call.transformTo(newProject);
+    }
+  }
+
+
+  /**
+   * Class that transforms IN/BETWEEN clauses in an expression.
+   * If any call is modified, the modified flag will be set to
+   * true after its execution.
+ */ + private static class RexInBetweenExpander extends RexShuttle { + + private final RexBuilder rexBuilder; + private boolean modified; + + private RexInBetweenExpander(RexBuilder rexBuilder) { + this.rexBuilder = rexBuilder; + this.modified = false; + } + + @Override + public RexNode visitCall(final RexCall call) { + switch (call.getKind()) { + case AND: { + boolean[] modified = {false}; + List newOperands = visitList(call.operands, modified); + if (modified[0]) { + return RexUtil.composeConjunction(rexBuilder, newOperands); + } + return call; + } + case OR: { + boolean[] modified = {false}; + List newOperands = visitList(call.operands, modified); + if (modified[0]) { + return RexUtil.composeDisjunction(rexBuilder, newOperands); + } + return call; + } + case IN: { + List newOperands = RexNodeConverter.transformInToOrOperands( + call.getOperands(), rexBuilder); + if (newOperands == null) { + // We could not execute transformation, return expression + return call; + } + modified = true; + if (newOperands.size() > 1) { + return rexBuilder.makeCall(SqlStdOperatorTable.OR, newOperands); + } + return newOperands.get(0); + } + case BETWEEN: { + List newOperands = RexNodeConverter.rewriteBetweenChildren( + call.getOperands(), rexBuilder); + modified = true; + if (call.getOperands().get(0).isAlwaysTrue()) { + return rexBuilder.makeCall(SqlStdOperatorTable.OR, newOperands); + } + return rexBuilder.makeCall(SqlStdOperatorTable.AND, newOperands); + } + default: + return super.visitCall(call); + } + } + + } + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java index bd1c84bcc1..99dfbfcb74 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/translator/RexNodeConverter.java @@ -277,21 +277,12 @@ private RexNode convert(ExprNodeGenericFuncDesc func) throws SemanticException { childRexNodeLst = rewriteToDateChildren(childRexNodeLst, rexBuilder); } else if (calciteOp.getKind() == SqlKind.BETWEEN) { assert childRexNodeLst.get(0).isAlwaysTrue() || childRexNodeLst.get(0).isAlwaysFalse(); - boolean invert = childRexNodeLst.get(0).isAlwaysTrue(); - SqlBinaryOperator cmpOp; - if (invert) { + childRexNodeLst = rewriteBetweenChildren(childRexNodeLst, rexBuilder); + if (childRexNodeLst.get(0).isAlwaysTrue()) { calciteOp = SqlStdOperatorTable.OR; - cmpOp = SqlStdOperatorTable.GREATER_THAN; } else { calciteOp = SqlStdOperatorTable.AND; - cmpOp = SqlStdOperatorTable.LESS_THAN_OR_EQUAL; } - RexNode op = childRexNodeLst.get(1); - RexNode rangeL = childRexNodeLst.get(2); - RexNode rangeH = childRexNodeLst.get(3); - childRexNodeLst.clear(); - childRexNodeLst.add(rexBuilder.makeCall(cmpOp, rangeL, op)); - childRexNodeLst.add(rexBuilder.makeCall(cmpOp, op, rangeH)); } expr = rexBuilder.makeCall(retType, calciteOp, childRexNodeLst); } else { @@ -606,6 +597,23 @@ private static RexNode makeCast(SqlTypeName typeName, final RexNode child, RexBu return convertedChildList; } + public static List rewriteBetweenChildren(List childRexNodeLst, + RexBuilder rexBuilder) { + final List convertedChildList = Lists.newArrayList(); + SqlBinaryOperator cmpOp; + if (childRexNodeLst.get(0).isAlwaysTrue()) { + cmpOp = SqlStdOperatorTable.GREATER_THAN; + } else { + cmpOp = SqlStdOperatorTable.LESS_THAN_OR_EQUAL; + } + RexNode op = childRexNodeLst.get(1); + RexNode rangeL = 
childRexNodeLst.get(2); + RexNode rangeH = childRexNodeLst.get(3); + convertedChildList.add(rexBuilder.makeCall(cmpOp, rangeL, op)); + convertedChildList.add(rexBuilder.makeCall(cmpOp, op, rangeH)); + return convertedChildList; + } + private static boolean checkForStatefulFunctions(List list) { for (ExprNodeDesc node : list) { if (node instanceof ExprNodeGenericFuncDesc) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java index 7b34e91139..bf083067da 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java @@ -212,6 +212,7 @@ import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterSetOpTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterSortPredicates; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterSortTransposeRule; +import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveInBetweenExpandRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveInsertExchange4JoinRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveIntersectMergeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveIntersectRewriteRule; @@ -2151,7 +2152,7 @@ public RelOptMaterialization apply(RelOptMaterialization materialization) { // There is a Project on top (due to nullability) final Project pq = (Project) viewScan; newViewScan = HiveProject.create(optCluster, copyNodeScan(pq.getInput()), - pq.getChildExps(), pq.getRowType(), Collections. emptyList()); + pq.getChildExps(), pq.getRowType(), Collections.emptyList()); } else { newViewScan = copyNodeScan(viewScan); } @@ -2192,10 +2193,19 @@ private RelNode copyNodeScan(RelNode scan) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); + // We need to expand IN/BETWEEN expressions when materialized view rewriting + // is triggered since otherwise this may prevent some rewritings from happening + HepProgramBuilder program = new HepProgramBuilder(); + generatePartialProgram(program, false, HepMatchOrder.DEPTH_FIRST, + HiveInBetweenExpandRule.FILTER_INSTANCE, + HiveInBetweenExpandRule.JOIN_INSTANCE, + HiveInBetweenExpandRule.PROJECT_INSTANCE); + basePlan = executeProgram(basePlan, program.build(), mdProvider, executorProvider); + if (mvRebuild) { // If it is a materialized view rebuild, we use the HepPlanner, since we only have // one MV and we would like to use it to create incremental maintenance plans - final HepProgramBuilder program = new HepProgramBuilder(); + program = new HepProgramBuilder(); generatePartialProgram(program, true, HepMatchOrder.TOP_DOWN, HiveMaterializedViewRule.MATERIALIZED_VIEW_REWRITING_RULES); // Add materialization for rebuild to planner @@ -2253,7 +2263,7 @@ private RelNode copyNodeScan(RelNode scan) { visitor.go(basePlan); if (visitor.isRewritingAllowed()) { // Trigger rewriting to remove UNION branch with MV - final HepProgramBuilder program = new HepProgramBuilder(); + program = new HepProgramBuilder(); if (visitor.isContainsAggregate()) { generatePartialProgram(program, false, HepMatchOrder.DEPTH_FIRST, HiveAggregateIncrementalRewritingRule.INSTANCE); @@ -2463,6 +2473,15 @@ private RelNode applyPostJoinOrderingTransform(RelNode basePlan, RelMetadataProv HiveProjectSortExchangeTransposeRule.INSTANCE, HiveProjectMergeRule.INSTANCE); } + // 10. 
We need to expand IN/BETWEEN expressions when loading a materialized view + // since otherwise this may prevent some rewritings from happening + if (ctx.isLoadingMaterializedView()) { + generatePartialProgram(program, false, HepMatchOrder.DEPTH_FIRST, + HiveInBetweenExpandRule.FILTER_INSTANCE, + HiveInBetweenExpandRule.JOIN_INSTANCE, + HiveInBetweenExpandRule.PROJECT_INSTANCE); + } + // Trigger program perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = executeProgram(basePlan, program.build(), mdProvider, executorProvider); diff --git a/ql/src/test/queries/clientpositive/materialized_view_rewrite_in_between.q b/ql/src/test/queries/clientpositive/materialized_view_rewrite_in_between.q new file mode 100644 index 0000000000..f769888f2c --- /dev/null +++ b/ql/src/test/queries/clientpositive/materialized_view_rewrite_in_between.q @@ -0,0 +1,61 @@ +SET hive.cli.errors.ignore=true; +SET hive.support.concurrency=true; +SET hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager; +SET metastore.strict.managed.tables=true; +SET hive.default.fileformat=textfile; +SET hive.default.fileformat.managed=orc; +SET metastore.create.as.acid=true; +SET hive.groupby.position.alias=true; + +drop database if exists expr2 cascade; +create database expr2; +use expr2; +create table sales(prod_id int, cust_id int, store_id int, sale_date timestamp, qty int, amt double, descr string); +insert into sales values +(11,1,101,'12/24/2013',1000,1234.00,'onedummytwo'); + +create materialized view mv1 stored as orc as (select prod_id, cust_id, store_id, sale_date, qty, amt, descr from sales where cust_id in (1,2,3,4,5)); +-- SAME ORDER +explain cbo +select prod_id, cust_id from sales where cust_id in (1,2,3,4,5); +-- DIFFERENT ORDER +explain cbo +select prod_id, cust_id from sales where cust_id in (5,1,2,3,4); + +drop materialized view mv1; + +drop database if exists in_pred cascade; +create database in_pred; +use in_pred; +create table census_pop (state string, year int, population bigint); +insert into census_pop values("AZ", 2010, 200), ("CA", 2011, 100), ("CA", 2010, 200), ("AZ", 2010, 100), ("NY", 2011, 121), ("AZ", 2011, 1000), ("OR", 2015, 1001), ("WA", 2016, 121), ("NJ", 2010, 500), ("NJ", 2010, 5000), ("AZ", 2014, 1004), ("TX", 2010, 1000), ("AZ", 2010, 1000), ("PT", 2017, 1200), ("NM", 2018, 120), ("CA", 2010, 200); + +create materialized view mv2 stored as orc as select state, year, sum(population) from census_pop where year IN (2010, 2018) group by state, year; +-- SAME +explain cbo +select state, year, sum(population) from census_pop where year IN (2010, 2018) group by state, year; +-- PARTIAL IN EQUALS +explain cbo +select state, year, sum(population) from census_pop where year = 2010 group by state, year; +-- PARTIAL +explain cbo +select state, year, sum(population) from census_pop where year in (2010) group by state, year; + +drop materialized view mv2; + +drop database if exists expr9 cascade; +create database expr9; +use expr9; +create table sales(prod_id int, cust_id int, store_id int, sale_date timestamp, qty int, amt double, descr string); +insert into sales values +(11,1,101,'12/24/2013',1000,1234.00,'onedummytwo'); + +create materialized view mv3 stored as orc as (select prod_id, cust_id, store_id, sale_date, qty, amt, descr from sales where cust_id >= 1 and prod_id < 31); +-- SAME +explain cbo +select * from sales where cust_id >= 1 and prod_id < 31; +-- BETWEEN AND RANGE +explain cbo +select * from sales where cust_id between 1 and 20 and prod_id < 31; + 
+drop materialized view mv3; diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_in_between.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_in_between.q.out new file mode 100644 index 0000000000..de303da574 --- /dev/null +++ b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_in_between.q.out @@ -0,0 +1,285 @@ +PREHOOK: query: drop database if exists expr2 cascade +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database if exists expr2 cascade +POSTHOOK: type: DROPDATABASE +PREHOOK: query: create database expr2 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:expr2 +POSTHOOK: query: create database expr2 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:expr2 +PREHOOK: query: use expr2 +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:expr2 +POSTHOOK: query: use expr2 +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:expr2 +PREHOOK: query: create table sales(prod_id int, cust_id int, store_id int, sale_date timestamp, qty int, amt double, descr string) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:expr2 +PREHOOK: Output: expr2@sales +POSTHOOK: query: create table sales(prod_id int, cust_id int, store_id int, sale_date timestamp, qty int, amt double, descr string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:expr2 +POSTHOOK: Output: expr2@sales +PREHOOK: query: insert into sales values +(11,1,101,'12/24/2013',1000,1234.00,'onedummytwo') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: expr2@sales +POSTHOOK: query: insert into sales values +(11,1,101,'12/24/2013',1000,1234.00,'onedummytwo') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: expr2@sales +POSTHOOK: Lineage: sales.amt SCRIPT [] +POSTHOOK: Lineage: sales.cust_id SCRIPT [] +POSTHOOK: Lineage: sales.descr SCRIPT [] +POSTHOOK: Lineage: sales.prod_id SCRIPT [] +POSTHOOK: Lineage: sales.qty SCRIPT [] +POSTHOOK: Lineage: sales.sale_date SCRIPT [] +POSTHOOK: Lineage: sales.store_id SCRIPT [] +PREHOOK: query: create materialized view mv1 stored as orc as (select prod_id, cust_id, store_id, sale_date, qty, amt, descr from sales where cust_id in (1,2,3,4,5)) +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: expr2@sales +PREHOOK: Output: database:expr2 +PREHOOK: Output: expr2@mv1 +POSTHOOK: query: create materialized view mv1 stored as orc as (select prod_id, cust_id, store_id, sale_date, qty, amt, descr from sales where cust_id in (1,2,3,4,5)) +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: expr2@sales +POSTHOOK: Output: database:expr2 +POSTHOOK: Output: expr2@mv1 +PREHOOK: query: explain cbo +select prod_id, cust_id from sales where cust_id in (1,2,3,4,5) +PREHOOK: type: QUERY +PREHOOK: Input: expr2@mv1 +PREHOOK: Input: expr2@sales +#### A masked pattern was here #### +POSTHOOK: query: explain cbo +select prod_id, cust_id from sales where cust_id in (1,2,3,4,5) +POSTHOOK: type: QUERY +POSTHOOK: Input: expr2@mv1 +POSTHOOK: Input: expr2@sales +#### A masked pattern was here #### +CBO PLAN: +HiveProject(prod_id=[$0], cust_id=[$1]) + HiveTableScan(table=[[expr2, mv1]], table:alias=[expr2.mv1]) + +PREHOOK: query: explain cbo +select prod_id, cust_id from sales where cust_id in (5,1,2,3,4) +PREHOOK: type: QUERY +PREHOOK: Input: expr2@mv1 +PREHOOK: Input: expr2@sales +#### A masked pattern was here #### +POSTHOOK: query: explain cbo +select prod_id, cust_id from sales where cust_id in (5,1,2,3,4) +POSTHOOK: type: QUERY 
+POSTHOOK: Input: expr2@mv1 +POSTHOOK: Input: expr2@sales +#### A masked pattern was here #### +CBO PLAN: +HiveProject(prod_id=[$0], cust_id=[$1]) + HiveTableScan(table=[[expr2, mv1]], table:alias=[expr2.mv1]) + +PREHOOK: query: drop materialized view mv1 +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: expr2@mv1 +PREHOOK: Output: expr2@mv1 +POSTHOOK: query: drop materialized view mv1 +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: expr2@mv1 +POSTHOOK: Output: expr2@mv1 +PREHOOK: query: drop database if exists in_pred cascade +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database if exists in_pred cascade +POSTHOOK: type: DROPDATABASE +PREHOOK: query: create database in_pred +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:in_pred +POSTHOOK: query: create database in_pred +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:in_pred +PREHOOK: query: use in_pred +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:in_pred +POSTHOOK: query: use in_pred +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:in_pred +PREHOOK: query: create table census_pop (state string, year int, population bigint) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:in_pred +PREHOOK: Output: in_pred@census_pop +POSTHOOK: query: create table census_pop (state string, year int, population bigint) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:in_pred +POSTHOOK: Output: in_pred@census_pop +PREHOOK: query: insert into census_pop values("AZ", 2010, 200), ("CA", 2011, 100), ("CA", 2010, 200), ("AZ", 2010, 100), ("NY", 2011, 121), ("AZ", 2011, 1000), ("OR", 2015, 1001), ("WA", 2016, 121), ("NJ", 2010, 500), ("NJ", 2010, 5000), ("AZ", 2014, 1004), ("TX", 2010, 1000), ("AZ", 2010, 1000), ("PT", 2017, 1200), ("NM", 2018, 120), ("CA", 2010, 200) +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: in_pred@census_pop +POSTHOOK: query: insert into census_pop values("AZ", 2010, 200), ("CA", 2011, 100), ("CA", 2010, 200), ("AZ", 2010, 100), ("NY", 2011, 121), ("AZ", 2011, 1000), ("OR", 2015, 1001), ("WA", 2016, 121), ("NJ", 2010, 500), ("NJ", 2010, 5000), ("AZ", 2014, 1004), ("TX", 2010, 1000), ("AZ", 2010, 1000), ("PT", 2017, 1200), ("NM", 2018, 120), ("CA", 2010, 200) +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: in_pred@census_pop +POSTHOOK: Lineage: census_pop.population SCRIPT [] +POSTHOOK: Lineage: census_pop.state SCRIPT [] +POSTHOOK: Lineage: census_pop.year SCRIPT [] +PREHOOK: query: create materialized view mv2 stored as orc as select state, year, sum(population) from census_pop where year IN (2010, 2018) group by state, year +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: in_pred@census_pop +PREHOOK: Output: database:in_pred +PREHOOK: Output: in_pred@mv2 +POSTHOOK: query: create materialized view mv2 stored as orc as select state, year, sum(population) from census_pop where year IN (2010, 2018) group by state, year +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: in_pred@census_pop +POSTHOOK: Output: database:in_pred +POSTHOOK: Output: in_pred@mv2 +PREHOOK: query: explain cbo +select state, year, sum(population) from census_pop where year IN (2010, 2018) group by state, year +PREHOOK: type: QUERY +PREHOOK: Input: in_pred@census_pop +PREHOOK: Input: in_pred@mv2 +#### A masked pattern was here #### +POSTHOOK: query: explain cbo +select state, year, sum(population) from census_pop where year IN (2010, 2018) group by state, year +POSTHOOK: type: QUERY +POSTHOOK: 
Input: in_pred@census_pop +POSTHOOK: Input: in_pred@mv2 +#### A masked pattern was here #### +CBO PLAN: +HiveTableScan(table=[[in_pred, mv2]], table:alias=[in_pred.mv2]) + +PREHOOK: query: explain cbo +select state, year, sum(population) from census_pop where year = 2010 group by state, year +PREHOOK: type: QUERY +PREHOOK: Input: in_pred@census_pop +PREHOOK: Input: in_pred@mv2 +#### A masked pattern was here #### +POSTHOOK: query: explain cbo +select state, year, sum(population) from census_pop where year = 2010 group by state, year +POSTHOOK: type: QUERY +POSTHOOK: Input: in_pred@census_pop +POSTHOOK: Input: in_pred@mv2 +#### A masked pattern was here #### +CBO PLAN: +HiveProject(state=[$0], $f1=[CAST(2010):INTEGER], $f10=[$1]) + HiveAggregate(group=[{0}], agg#0=[sum($2)]) + HiveFilter(condition=[=(2010, $1)]) + HiveTableScan(table=[[in_pred, mv2]], table:alias=[in_pred.mv2]) + +PREHOOK: query: explain cbo +select state, year, sum(population) from census_pop where year in (2010) group by state, year +PREHOOK: type: QUERY +PREHOOK: Input: in_pred@census_pop +PREHOOK: Input: in_pred@mv2 +#### A masked pattern was here #### +POSTHOOK: query: explain cbo +select state, year, sum(population) from census_pop where year in (2010) group by state, year +POSTHOOK: type: QUERY +POSTHOOK: Input: in_pred@census_pop +POSTHOOK: Input: in_pred@mv2 +#### A masked pattern was here #### +CBO PLAN: +HiveProject(state=[$0], $f1=[CAST(2010):INTEGER], $f10=[$1]) + HiveAggregate(group=[{0}], agg#0=[sum($2)]) + HiveFilter(condition=[=(2010, $1)]) + HiveTableScan(table=[[in_pred, mv2]], table:alias=[in_pred.mv2]) + +PREHOOK: query: drop materialized view mv2 +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: in_pred@mv2 +PREHOOK: Output: in_pred@mv2 +POSTHOOK: query: drop materialized view mv2 +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: in_pred@mv2 +POSTHOOK: Output: in_pred@mv2 +PREHOOK: query: drop database if exists expr9 cascade +PREHOOK: type: DROPDATABASE +POSTHOOK: query: drop database if exists expr9 cascade +POSTHOOK: type: DROPDATABASE +PREHOOK: query: create database expr9 +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:expr9 +POSTHOOK: query: create database expr9 +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:expr9 +PREHOOK: query: use expr9 +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:expr9 +POSTHOOK: query: use expr9 +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:expr9 +PREHOOK: query: create table sales(prod_id int, cust_id int, store_id int, sale_date timestamp, qty int, amt double, descr string) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:expr9 +PREHOOK: Output: expr9@sales +POSTHOOK: query: create table sales(prod_id int, cust_id int, store_id int, sale_date timestamp, qty int, amt double, descr string) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:expr9 +POSTHOOK: Output: expr9@sales +PREHOOK: query: insert into sales values +(11,1,101,'12/24/2013',1000,1234.00,'onedummytwo') +PREHOOK: type: QUERY +PREHOOK: Input: _dummy_database@_dummy_table +PREHOOK: Output: expr9@sales +POSTHOOK: query: insert into sales values +(11,1,101,'12/24/2013',1000,1234.00,'onedummytwo') +POSTHOOK: type: QUERY +POSTHOOK: Input: _dummy_database@_dummy_table +POSTHOOK: Output: expr9@sales +POSTHOOK: Lineage: sales.amt SCRIPT [] +POSTHOOK: Lineage: sales.cust_id SCRIPT [] +POSTHOOK: Lineage: sales.descr SCRIPT [] +POSTHOOK: Lineage: sales.prod_id SCRIPT [] +POSTHOOK: Lineage: sales.qty SCRIPT [] +POSTHOOK: Lineage: sales.sale_date 
SCRIPT [] +POSTHOOK: Lineage: sales.store_id SCRIPT [] +PREHOOK: query: create materialized view mv3 stored as orc as (select prod_id, cust_id, store_id, sale_date, qty, amt, descr from sales where cust_id >= 1 and prod_id < 31) +PREHOOK: type: CREATE_MATERIALIZED_VIEW +PREHOOK: Input: expr9@sales +PREHOOK: Output: database:expr9 +PREHOOK: Output: expr9@mv3 +POSTHOOK: query: create materialized view mv3 stored as orc as (select prod_id, cust_id, store_id, sale_date, qty, amt, descr from sales where cust_id >= 1 and prod_id < 31) +POSTHOOK: type: CREATE_MATERIALIZED_VIEW +POSTHOOK: Input: expr9@sales +POSTHOOK: Output: database:expr9 +POSTHOOK: Output: expr9@mv3 +PREHOOK: query: explain cbo +select * from sales where cust_id >= 1 and prod_id < 31 +PREHOOK: type: QUERY +PREHOOK: Input: expr9@mv3 +PREHOOK: Input: expr9@sales +#### A masked pattern was here #### +POSTHOOK: query: explain cbo +select * from sales where cust_id >= 1 and prod_id < 31 +POSTHOOK: type: QUERY +POSTHOOK: Input: expr9@mv3 +POSTHOOK: Input: expr9@sales +#### A masked pattern was here #### +CBO PLAN: +HiveTableScan(table=[[expr9, mv3]], table:alias=[expr9.mv3]) + +PREHOOK: query: explain cbo +select * from sales where cust_id between 1 and 20 and prod_id < 31 +PREHOOK: type: QUERY +PREHOOK: Input: expr9@mv3 +PREHOOK: Input: expr9@sales +#### A masked pattern was here #### +POSTHOOK: query: explain cbo +select * from sales where cust_id between 1 and 20 and prod_id < 31 +POSTHOOK: type: QUERY +POSTHOOK: Input: expr9@mv3 +POSTHOOK: Input: expr9@sales +#### A masked pattern was here #### +CBO PLAN: +HiveFilter(condition=[>=(20, $1)]) + HiveTableScan(table=[[expr9, mv3]], table:alias=[expr9.mv3]) + +PREHOOK: query: drop materialized view mv3 +PREHOOK: type: DROP_MATERIALIZED_VIEW +PREHOOK: Input: expr9@mv3 +PREHOOK: Output: expr9@mv3 +POSTHOOK: query: drop materialized view mv3 +POSTHOOK: type: DROP_MATERIALIZED_VIEW +POSTHOOK: Input: expr9@mv3 +POSTHOOK: Output: expr9@mv3 diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_ssb.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_ssb.q.out index 2fd835bdb2..0b996dc78d 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_ssb.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_ssb.q.out @@ -646,10 +646,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv_n0 - filterExpr: ((lo_quantity < 25.0D) and (d_year = 1993) and lo_discount BETWEEN 1.0D AND 3.0D) (type: boolean) + filterExpr: ((lo_quantity < 25.0D) and lo_discount BETWEEN 1.0D AND 3.0D and (d_year = 1993)) (type: boolean) Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((lo_quantity < 25.0D) and (d_year = 1993) and lo_discount BETWEEN 1.0D AND 3.0D) (type: boolean) + predicate: ((lo_quantity < 25.0D) and lo_discount BETWEEN 1.0D AND 3.0D and (d_year = 1993)) (type: boolean) Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: discounted_price (type: double) @@ -736,10 +736,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv_n0 - filterExpr: ((d_yearmonthnum = 199401) and lo_discount BETWEEN 4.0D AND 6.0D and lo_quantity BETWEEN 26.0D AND 35.0D) (type: boolean) + filterExpr: (lo_quantity BETWEEN 26.0D AND 35.0D and lo_discount BETWEEN 4.0D AND 6.0D and (d_yearmonthnum = 199401)) (type: boolean) Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: 
COMPLETE Filter Operator - predicate: ((d_yearmonthnum = 199401) and lo_discount BETWEEN 4.0D AND 6.0D and lo_quantity BETWEEN 26.0D AND 35.0D) (type: boolean) + predicate: (lo_quantity BETWEEN 26.0D AND 35.0D and lo_discount BETWEEN 4.0D AND 6.0D and (d_yearmonthnum = 199401)) (type: boolean) Statistics: Num rows: 1 Data size: 28 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: discounted_price (type: double) @@ -828,10 +828,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv_n0 - filterExpr: ((d_year = 1994) and (d_weeknuminyear = 6) and lo_discount BETWEEN 5.0D AND 7.0D and lo_quantity BETWEEN 26.0D AND 35.0D) (type: boolean) + filterExpr: (lo_quantity BETWEEN 26.0D AND 35.0D and lo_discount BETWEEN 5.0D AND 7.0D and (d_year = 1994) and (d_weeknuminyear = 6)) (type: boolean) Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((d_year = 1994) and (d_weeknuminyear = 6) and lo_discount BETWEEN 5.0D AND 7.0D and lo_quantity BETWEEN 26.0D AND 35.0D) (type: boolean) + predicate: (lo_quantity BETWEEN 26.0D AND 35.0D and lo_discount BETWEEN 5.0D AND 7.0D and (d_year = 1994) and (d_weeknuminyear = 6)) (type: boolean) Statistics: Num rows: 1 Data size: 32 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: discounted_price (type: double) @@ -1059,10 +1059,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv_n0 - filterExpr: ((s_region = 'ASIA') and p_brand1 BETWEEN 'MFGR#2221' AND 'MFGR#2228') (type: boolean) + filterExpr: (p_brand1 BETWEEN 'MFGR#2221' AND 'MFGR#2228' and (s_region = 'ASIA')) (type: boolean) Statistics: Num rows: 1 Data size: 180 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((s_region = 'ASIA') and p_brand1 BETWEEN 'MFGR#2221' AND 'MFGR#2228') (type: boolean) + predicate: (p_brand1 BETWEEN 'MFGR#2221' AND 'MFGR#2228' and (s_region = 'ASIA')) (type: boolean) Statistics: Num rows: 1 Data size: 180 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: d_year (type: int), p_brand1 (type: string), lo_revenue (type: double) @@ -1315,10 +1315,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv_n0 - filterExpr: ((c_region = 'ASIA') and (s_region = 'ASIA') and d_year BETWEEN 1992 AND 1997) (type: boolean) + filterExpr: (d_year BETWEEN 1992 AND 1997 and (c_region = 'ASIA') and (s_region = 'ASIA')) (type: boolean) Statistics: Num rows: 1 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((c_region = 'ASIA') and (s_region = 'ASIA') and d_year BETWEEN 1992 AND 1997) (type: boolean) + predicate: (d_year BETWEEN 1992 AND 1997 and (c_region = 'ASIA') and (s_region = 'ASIA')) (type: boolean) Statistics: Num rows: 1 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: c_nation (type: string), d_year (type: int), s_nation (type: string), lo_revenue (type: double) @@ -1443,10 +1443,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv_n0 - filterExpr: ((c_nation = 'UNITED STATES') and (s_nation = 'UNITED STATES') and d_year BETWEEN 1992 AND 1997) (type: boolean) + filterExpr: (d_year BETWEEN 1992 AND 1997 and (c_nation = 'UNITED STATES') and (s_nation = 'UNITED STATES')) (type: boolean) Statistics: Num rows: 1 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((c_nation = 'UNITED STATES') and (s_nation = 'UNITED STATES') and d_year BETWEEN 1992 AND 1997) (type: 
boolean) + predicate: (d_year BETWEEN 1992 AND 1997 and (c_nation = 'UNITED STATES') and (s_nation = 'UNITED STATES')) (type: boolean) Statistics: Num rows: 1 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: c_city (type: string), d_year (type: int), s_city (type: string), lo_revenue (type: double) diff --git a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_ssb_2.q.out b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_ssb_2.q.out index 55c5a22ee1..e6f126091e 100644 --- a/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_ssb_2.q.out +++ b/ql/src/test/results/clientpositive/llap/materialized_view_rewrite_ssb_2.q.out @@ -648,10 +648,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv - filterExpr: ((UDFToDouble(lo_quantity) < 25.0D) and (UDFToInteger(d_year) = 1993) and UDFToDouble(lo_discount) BETWEEN 1.0D AND 3.0D) (type: boolean) + filterExpr: ((UDFToDouble(lo_quantity) < 25.0D) and UDFToDouble(lo_discount) BETWEEN 1.0D AND 3.0D and (UDFToInteger(d_year) = 1993)) (type: boolean) Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((UDFToDouble(lo_quantity) < 25.0D) and (UDFToInteger(d_year) = 1993) and UDFToDouble(lo_discount) BETWEEN 1.0D AND 3.0D) (type: boolean) + predicate: ((UDFToDouble(lo_quantity) < 25.0D) and UDFToDouble(lo_discount) BETWEEN 1.0D AND 3.0D and (UDFToInteger(d_year) = 1993)) (type: boolean) Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: discounted_price (type: double) @@ -738,10 +738,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv - filterExpr: ((UDFToInteger(d_yearmonthnum) = 199401) and UDFToDouble(lo_discount) BETWEEN 4.0D AND 6.0D and UDFToDouble(lo_quantity) BETWEEN 26.0D AND 35.0D) (type: boolean) + filterExpr: (UDFToDouble(lo_quantity) BETWEEN 26.0D AND 35.0D and UDFToDouble(lo_discount) BETWEEN 4.0D AND 6.0D and (UDFToInteger(d_yearmonthnum) = 199401)) (type: boolean) Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((UDFToInteger(d_yearmonthnum) = 199401) and UDFToDouble(lo_discount) BETWEEN 4.0D AND 6.0D and UDFToDouble(lo_quantity) BETWEEN 26.0D AND 35.0D) (type: boolean) + predicate: (UDFToDouble(lo_quantity) BETWEEN 26.0D AND 35.0D and UDFToDouble(lo_discount) BETWEEN 4.0D AND 6.0D and (UDFToInteger(d_yearmonthnum) = 199401)) (type: boolean) Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: discounted_price (type: double) @@ -830,10 +830,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv - filterExpr: ((UDFToInteger(d_year) = 1994) and (UDFToInteger(d_weeknuminyear) = 6) and UDFToDouble(lo_discount) BETWEEN 5.0D AND 7.0D and UDFToDouble(lo_quantity) BETWEEN 26.0D AND 35.0D) (type: boolean) + filterExpr: (UDFToDouble(lo_quantity) BETWEEN 26.0D AND 35.0D and UDFToDouble(lo_discount) BETWEEN 5.0D AND 7.0D and (UDFToInteger(d_year) = 1994) and (UDFToInteger(d_weeknuminyear) = 6)) (type: boolean) Statistics: Num rows: 1 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((UDFToInteger(d_year) = 1994) and (UDFToInteger(d_weeknuminyear) = 6) and UDFToDouble(lo_discount) BETWEEN 5.0D AND 7.0D and UDFToDouble(lo_quantity) BETWEEN 26.0D AND 35.0D) (type: boolean) + predicate: (UDFToDouble(lo_quantity) BETWEEN 26.0D AND 35.0D and 
UDFToDouble(lo_discount) BETWEEN 5.0D AND 7.0D and (UDFToInteger(d_year) = 1994) and (UDFToInteger(d_weeknuminyear) = 6)) (type: boolean) Statistics: Num rows: 1 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: discounted_price (type: double) @@ -1061,10 +1061,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv - filterExpr: ((s_region = 'ASIA') and p_brand1 BETWEEN 'MFGR#2221' AND 'MFGR#2228') (type: boolean) + filterExpr: (p_brand1 BETWEEN 'MFGR#2221' AND 'MFGR#2228' and (s_region = 'ASIA')) (type: boolean) Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((s_region = 'ASIA') and p_brand1 BETWEEN 'MFGR#2221' AND 'MFGR#2228') (type: boolean) + predicate: (p_brand1 BETWEEN 'MFGR#2221' AND 'MFGR#2228' and (s_region = 'ASIA')) (type: boolean) Statistics: Num rows: 1 Data size: 260 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: UDFToInteger(d_year) (type: int), p_brand1 (type: string), lo_revenue (type: double) @@ -1317,10 +1317,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv - filterExpr: ((c_region = 'ASIA') and (s_region = 'ASIA') and UDFToInteger(d_year) BETWEEN 1992 AND 1997) (type: boolean) + filterExpr: (UDFToInteger(d_year) BETWEEN 1992 AND 1997 and (c_region = 'ASIA') and (s_region = 'ASIA')) (type: boolean) Statistics: Num rows: 1 Data size: 428 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((c_region = 'ASIA') and (s_region = 'ASIA') and UDFToInteger(d_year) BETWEEN 1992 AND 1997) (type: boolean) + predicate: (UDFToInteger(d_year) BETWEEN 1992 AND 1997 and (c_region = 'ASIA') and (s_region = 'ASIA')) (type: boolean) Statistics: Num rows: 1 Data size: 428 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: c_nation (type: string), s_nation (type: string), UDFToInteger(d_year) (type: int), lo_revenue (type: double) @@ -1445,10 +1445,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: default.ssb_mv - filterExpr: ((c_nation = 'UNITED STATES') and (s_nation = 'UNITED STATES') and UDFToInteger(d_year) BETWEEN 1992 AND 1997) (type: boolean) + filterExpr: (UDFToInteger(d_year) BETWEEN 1992 AND 1997 and (c_nation = 'UNITED STATES') and (s_nation = 'UNITED STATES')) (type: boolean) Statistics: Num rows: 1 Data size: 428 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator - predicate: ((c_nation = 'UNITED STATES') and (s_nation = 'UNITED STATES') and UDFToInteger(d_year) BETWEEN 1992 AND 1997) (type: boolean) + predicate: (UDFToInteger(d_year) BETWEEN 1992 AND 1997 and (c_nation = 'UNITED STATES') and (s_nation = 'UNITED STATES')) (type: boolean) Statistics: Num rows: 1 Data size: 428 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: c_city (type: string), s_city (type: string), UDFToInteger(d_year) (type: int), lo_revenue (type: double)