diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java
index a4484ec..07a244d 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveJoinAddNotNullRule.java
@@ -30,12 +30,11 @@
 import org.apache.calcite.rel.core.Join;
 import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.calcite.rel.core.RelFactories.FilterFactory;
+import org.apache.calcite.rel.metadata.RelMetadataQuery;
 import org.apache.calcite.rel.type.RelDataType;
 import org.apache.calcite.rex.RexBuilder;
-import org.apache.calcite.rex.RexCall;
 import org.apache.calcite.rex.RexNode;
 import org.apache.calcite.rex.RexUtil;
-import org.apache.calcite.sql.SqlKind;
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.calcite.sql.type.SqlTypeName;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
@@ -142,12 +141,9 @@ public void onMatch(RelOptRuleCall call) {
     final RelDataType returnType = cluster.getTypeFactory().
         createSqlType(SqlTypeName.BOOLEAN);
 
-    final Map<String, RexNode> newConditions;
-    if (input instanceof HiveFilter) {
-      newConditions = splitCondition(((HiveFilter) input).getCondition());
-    }
-    else {
-      newConditions = new HashMap<String, RexNode>();
+    final Map<String, RexNode> newConditions = new HashMap<String, RexNode>();
+    for (RexNode node : RelMetadataQuery.getPulledUpPredicates(input).pulledUpPredicates) {
+      newConditions.put(node.toString(), node);
     }
     for (int pos : inputKeyPositions) {
       try {
@@ -175,20 +171,7 @@ public void onMatch(RelOptRuleCall call) {
     }
     return newConditions;
   }
-
-  private static Map<String, RexNode> splitCondition(RexNode condition) {
-    Map<String, RexNode> newConditions = new HashMap<String, RexNode>();
-    if (condition.getKind() == SqlKind.AND) {
-      for (RexNode node : ((RexCall) condition).getOperands()) {
-        newConditions.put(node.toString(), node);
-      }
-    }
-    else {
-      newConditions.put(condition.toString(), condition);
-    }
-    return newConditions;
-  }
-
+
   private static RelNode createHiveFilterConjunctiveCondition(FilterFactory filterFactory,
       RexBuilder rexBuilder, RelNode input, Collection<RexNode> conditions) {
     final RexNode newCondition = RexUtil.composeConjunction(rexBuilder, conditions, false);
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
index dccb598..ae5e56e 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
@@ -508,6 +508,9 @@ public static boolean prunePartitionNames(List<String> partColumnNames,
       boolean isUnknown = (isNeeded == null);
       if (!isUnknown && !isNeeded) {
         partIter.remove();
+        if (LOG.isDebugEnabled()) {
+          LOG.debug("skipping partition: " + partName);
+        }
         continue;
       }
       if (isUnknown && values.contains(defaultPartitionName)) {
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index a73e24e..eeddcbb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -945,12 +945,7 @@ private RelNode applyPreJoinOrderingTransforms(RelNode basePlan, RelMetadataProv
     basePlan = hepPlan(basePlan, true, mdProvider, SemiJoinJoinTransposeRule.INSTANCE,
         SemiJoinFilterTransposeRule.INSTANCE, SemiJoinProjectTransposeRule.INSTANCE);
 
-    // 2. Add not null filters
-    if (conf.getBoolVar(HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP)) {
-      basePlan = hepPlan(basePlan, true, mdProvider, HiveJoinAddNotNullRule.INSTANCE);
-    }
-
-    // 3. Constant propagation, common filter extraction, and PPD
+    // 2. Constant propagation, common filter extraction, and PPD
     basePlan = hepPlan(basePlan, true, mdProvider,
         ReduceExpressionsRule.PROJECT_INSTANCE,
         ReduceExpressionsRule.FILTER_INSTANCE,
@@ -961,22 +956,23 @@ private RelNode applyPreJoinOrderingTransforms(RelNode basePlan, RelMetadataProv
         new HiveFilterSetOpTransposeRule(HiveFilter.DEFAULT_FILTER_FACTORY),
         HiveFilterJoinRule.JOIN,
         HiveFilterJoinRule.FILTER_ON_JOIN,
+        HiveJoinAddNotNullRule.INSTANCE,
         new FilterAggregateTransposeRule(Filter.class, HiveFilter.DEFAULT_FILTER_FACTORY, Aggregate.class));
 
-    // 4. Transitive inference & Partition Pruning
-    basePlan = hepPlan(basePlan, false, mdProvider, new JoinPushTransitivePredicatesRule(
-        Join.class, HiveFilter.DEFAULT_FILTER_FACTORY),
+    // 3. Transitive inference & Partition Pruning
+    basePlan = hepPlan(basePlan, false, mdProvider,
+        new JoinPushTransitivePredicatesRule(Join.class, HiveFilter.DEFAULT_FILTER_FACTORY),
         new HivePartitionPruneRule(conf));
 
-    // 5. Projection Pruning
+    // 4. Projection Pruning
     HiveRelFieldTrimmer fieldTrimmer = new HiveRelFieldTrimmer(null,
         HiveProject.DEFAULT_PROJECT_FACTORY, HiveFilter.DEFAULT_FILTER_FACTORY,
         HiveJoin.HIVE_JOIN_FACTORY, HiveSemiJoin.HIVE_SEMIJOIN_FACTORY,
         HiveSort.HIVE_SORT_REL_FACTORY, HiveAggregate.HIVE_AGGR_REL_FACTORY,
         HiveUnion.UNION_REL_FACTORY);
     basePlan = fieldTrimmer.trim(basePlan);
 
-    // 6. Rerun PPD through Project as column pruning would have introduced DT
+    // 5. Rerun PPD through Project as column pruning would have introduced DT
     // above scans
     basePlan = hepPlan(basePlan, true, mdProvider,
         new FilterProjectTransposeRule(Filter.class, HiveFilter.DEFAULT_FILTER_FACTORY,
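
Note on the HiveJoinAddNotNullRule hunks: instead of splitting only a parent HiveFilter's AND condition, the rule now asks Calcite's metadata layer for every predicate already known to hold on the join input, and deduplicates them by digest. A minimal sketch of that pattern, assuming the pre-Calcite-1.6 static RelMetadataQuery API that the patch itself calls; the class and method names below are illustrative, not part of the patch:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.calcite.rel.RelNode;
    import org.apache.calcite.rel.metadata.RelMetadataQuery;
    import org.apache.calcite.rex.RexNode;

    // Illustrative helper, not the committed code.
    final class PulledUpPredicatesSketch {
      // Collect the distinct predicates implied by `input`, keyed by digest
      // (RexNode.toString()) so duplicates collapse to one entry.
      static Map<String, RexNode> collectConditions(RelNode input) {
        final Map<String, RexNode> newConditions = new HashMap<String, RexNode>();
        for (RexNode node
            : RelMetadataQuery.getPulledUpPredicates(input).pulledUpPredicates) {
          newConditions.put(node.toString(), node);
        }
        return newConditions;
      }
    }

Because getPulledUpPredicates considers the whole input subtree rather than one immediate HiveFilter, the later loop over inputKeyPositions can skip IS NOT NULL filters that are implied anywhere below the join, which also makes the removed splitCondition helper redundant.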
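
For the CalcitePlanner hunks: HiveJoinAddNotNullRule now runs unconditionally inside the same hepPlan batch as the filter push-down rules, instead of in its own pass gated on HIVE_CBO_RETPATH_HIVEOP, and the step comments are renumbered accordingly. hepPlan itself is not shown in this diff; as a rough sketch of what such a helper does with Calcite's HepPlanner (an assumed shape: the real method also wires in the metadata provider and the boolean flag passed as its second argument):

    import java.util.Arrays;

    import org.apache.calcite.plan.RelOptRule;
    import org.apache.calcite.plan.hep.HepPlanner;
    import org.apache.calcite.plan.hep.HepProgramBuilder;
    import org.apache.calcite.rel.RelNode;

    // Illustrative stand-in for CalcitePlanner.hepPlan(), not the actual method.
    final class HepPlanSketch {
      static RelNode hepPlan(RelNode basePlan, RelOptRule... rules) {
        final HepProgramBuilder builder = new HepProgramBuilder();
        // Register the batch; the planner fires rules until none match.
        builder.addRuleCollection(Arrays.asList(rules));
        final HepPlanner planner = new HepPlanner(builder.build());
        planner.setRoot(basePlan);
        return planner.findBestExp();
      }
    }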