diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
index 682b987..e839be3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
@@ -262,12 +262,6 @@ private void runDynamicPartitionPruning(OptimizeSparkProcContext procCtx)
     List<Node> topNodes = new ArrayList<Node>();
     topNodes.addAll(parseContext.getTopOps().values());
     ogw.startWalking(topNodes, null);
-
-    // need a new run of the constant folding because we might have created lots
-    // of "and true and true" conditions.
-    if(procCtx.getConf().getBoolVar(HiveConf.ConfVars.HIVEOPTCONSTANTPROPAGATION)) {
-      new ConstantPropagate().transform(parseContext);
-    }
   }

   private void runSetReducerParallelism(OptimizeSparkProcContext procCtx) throws SemanticException {
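
For context on the deleted block: dynamic partition pruning rewrites predicates and can leave trivially-true conjuncts behind, which the removed follow-up pass folded away via `ConstantPropagate`. The sketch below is a toy folder in plain Java showing the kind of `x AND true AND true` simplification involved; the `Expr`/`Col`/`Const`/`And` types are invented for illustration and are not Hive's operator tree or its actual `ConstantPropagate` transform.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Illustrative only: a minimal boolean constant folder, not Hive's ConstantPropagate.
public class ConstantFoldDemo {
  interface Expr {}

  static final class Const implements Expr {
    final boolean value;
    Const(boolean value) { this.value = value; }
    @Override public String toString() { return Boolean.toString(value); }
  }

  static final class Col implements Expr {
    final String name;
    Col(String name) { this.name = name; }
    @Override public String toString() { return name; }
  }

  static final class And implements Expr {
    final List<Expr> children;
    And(List<Expr> children) { this.children = children; }
    And(Expr... children) { this(Arrays.asList(children)); }
    @Override public String toString() {
      return children.stream().map(Object::toString)
          .collect(Collectors.joining(" AND ", "(", ")"));
    }
  }

  // Fold conjunctions: drop TRUE operands, short-circuit on FALSE,
  // unwrap single-child ANDs, and turn an empty AND into TRUE.
  static Expr fold(Expr e) {
    if (!(e instanceof And)) {
      return e;
    }
    List<Expr> kept = new ArrayList<>();
    for (Expr child : ((And) e).children) {
      Expr folded = fold(child);
      if (folded instanceof Const) {
        if (!((Const) folded).value) {
          return new Const(false); // anything AND false => false
        }
        continue;                  // anything AND true  => drop the true
      }
      kept.add(folded);
    }
    if (kept.isEmpty()) return new Const(true);
    if (kept.size() == 1) return kept.get(0);
    return new And(kept);
  }

  public static void main(String[] args) {
    // The shape pruning can produce: a real predicate plus redundant trues.
    Expr pruned = new And(new Col("ds = '2015-01-01'"),
                          new Const(true), new Const(true));
    System.out.println(pruned + "  ==>  " + fold(pruned));
    // prints: (ds = '2015-01-01' AND true AND true)  ==>  ds = '2015-01-01'
  }
}
```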