diff --git common/src/java/org/apache/hadoop/hive/conf/HiveConf.java common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 593c566..a642b39 100644
--- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1627,6 +1627,8 @@
HIVE_VECTORIZATION_GROUPBY_FLUSH_PERCENT("hive.vectorized.groupby.flush.percent", (float) 0.1,
"Percent of entries in the group by aggregation hash flushed when the memory threshold is exceeded."),
+ // Constant propagation optimizer
+ HIVEOPTCONSTANTPROPAGATION("hive.optimize.constant.propagation", true, "Whether to enable constant propagation optimizer"),
HIVE_TYPE_CHECK_ON_INSERT("hive.typecheck.on.insert", true, ""),
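Note: the new flag is an ordinary boolean ConfVar, so callers can read it with HiveConf.getBoolVar. A minimal, self-contained sketch; the wrapper class and main method are illustrative only and not part of this patch:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

// Illustrative only: prints the effective value of hive.optimize.constant.propagation.
public class ConstantPropagationFlagCheck {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    boolean enabled = conf.getBoolVar(ConfVars.HIVEOPTCONSTANTPROPAGATION);
    System.out.println("hive.optimize.constant.propagation = " + enabled);
  }
}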
diff --git conf/hive-default.xml.template conf/hive-default.xml.template
index ba922d0..fc6af4a 100644
--- conf/hive-default.xml.template
+++ conf/hive-default.xml.template
@@ -2899,6 +2899,11 @@
Percent of entries in the group by aggregation hash flushed when the memory threshold is exceeded.
+ <property>
+ <name>hive.optimize.constant.propagation</name>
+ <value>true</value>
+ <description>Whether to enable constant propagation optimizer</description>
+ </property>
hive.typecheck.on.insert
true
diff --git contrib/src/test/results/clientpositive/udf_example_add.q.out contrib/src/test/results/clientpositive/udf_example_add.q.out
index 4510ba4..5a2d37a 100644
--- contrib/src/test/results/clientpositive/udf_example_add.q.out
+++ contrib/src/test/results/clientpositive/udf_example_add.q.out
@@ -36,7 +36,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: example_add(1, 2) (type: int), example_add(1, 2, 3) (type: int), example_add(1, 2, 3, 4) (type: int), example_add(1.1, 2.2) (type: double), example_add(1.1, 2.2, 3.3) (type: double), example_add(1.1, 2.2, 3.3, 4.4) (type: double), example_add(1, 2, 3, 4.4) (type: double)
+ expressions: 3 (type: int), 6 (type: int), 10 (type: int), 3.3000000000000003 (type: double), 6.6 (type: double), 11.0 (type: double), 10.4 (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
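The folded literals above come from evaluating the UDF at compile time, so double results show ordinary IEEE-754 rounding (hence 3.3000000000000003 rather than 3.3). A tiny standalone illustration, not part of the patch:

// Illustrative only: the same double arithmetic the folded plan constant reflects.
public class DoubleFoldingDemo {
  public static void main(String[] args) {
    System.out.println(1.1 + 2.2); // prints 3.3000000000000003
  }
}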
diff --git contrib/src/test/results/clientpositive/udf_example_format.q.out contrib/src/test/results/clientpositive/udf_example_format.q.out
index 83e508a..24e3670 100644
--- contrib/src/test/results/clientpositive/udf_example_format.q.out
+++ contrib/src/test/results/clientpositive/udf_example_format.q.out
@@ -30,7 +30,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: example_format('abc') (type: string), example_format('%1$s', 1.1) (type: string), example_format('%1$s %2$e', 1.1, 1.2) (type: string), example_format('%1$x %2$o %3$d', 10, 10, 10) (type: string)
+ expressions: 'abc' (type: string), '1.1' (type: string), '1.1 1.200000e+00' (type: string), 'a 12 10' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
diff --git hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
index 447a7cd..399a1f3 100644
--- hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
+++ hbase-handler/src/test/results/positive/hbase_ppd_key_range.q.out
@@ -193,10 +193,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: hbase_pushdown
- filterExpr: (key >= UDFToString((40 + 50))) (type: boolean)
+ filterExpr: (key >= '90') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key >= UDFToString((40 + 50))) (type: boolean)
+ predicate: (key >= '90') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string)
diff --git hbase-handler/src/test/results/positive/hbase_pushdown.q.out hbase-handler/src/test/results/positive/hbase_pushdown.q.out
index 7bb1f5e..93a7ae4 100644
--- hbase-handler/src/test/results/positive/hbase_pushdown.q.out
+++ hbase-handler/src/test/results/positive/hbase_pushdown.q.out
@@ -43,7 +43,7 @@ STAGE PLANS:
predicate: (key = 90) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: 90 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -235,7 +235,7 @@ STAGE PLANS:
predicate: (((key = 80) and (key = 90)) and (value like '%90%')) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: 90 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -398,7 +398,7 @@ STAGE PLANS:
predicate: (key = 90) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: 90 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git hbase-handler/src/test/results/positive/ppd_key_ranges.q.out hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
index 1509b0b..7e77cb3 100644
--- hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
+++ hbase-handler/src/test/results/positive/ppd_key_ranges.q.out
@@ -191,7 +191,7 @@ STAGE PLANS:
predicate: (((key >= 9) and (key < 17)) and (key = 11)) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: 11 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
index feb8558..e40da12 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ColumnInfo.java
@@ -220,4 +220,8 @@ public boolean equals(Object obj) {
return true;
}
+
+ public void setObjectinspector(ObjectInspector writableObjectInspector) {
+ this.objectInspector = writableObjectInspector;
+ }
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
index ac94959..b8d6ab7 100755
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ExprNodeEvaluator.java
@@ -104,4 +104,9 @@ public boolean isStateful() {
public ExprNodeEvaluator[] getChildren() {
return null;
}
+
+ @Override
+ public String toString() {
+ return "ExprNodeEvaluator[" + expr + "]";
+ }
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
index e877cd4..af835fa 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
@@ -131,7 +131,7 @@ protected void initializeOp(Configuration hconf) throws HiveException {
int bigPos = conf.getPosBigTable();
List<ObjectInspector> valueOI = new ArrayList<ObjectInspector>();
for (int i = 0; i < valueIndex.length; i++) {
- if (valueIndex[i] >= 0) {
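+ // Guard: the big-table key inspector list can be empty (e.g. when join keys were
+ // folded to constants), so fall back to the value inspectors below in that case.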
+ if (valueIndex[i] >= 0 && !joinKeysObjectInspectors[bigPos].isEmpty()) {
valueOI.add(joinKeysObjectInspectors[bigPos].get(valueIndex[i]));
} else {
valueOI.add(inspectors.get(i));
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
index 28c977a..6d9f5e4 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/ReduceSinkOperator.java
@@ -272,6 +272,7 @@ public void processOp(Object row, int tag) throws HiveException {
// TODO: this is fishy - we init object inspectors based on first tag. We
// should either init for each tag, or if rowInspector doesn't really
// matter, then we can create this in ctor and get rid of firstRow.
+ LOG.info("keys are " + conf.getOutputKeyColumnNames() + " num distributions: " + conf.getNumDistributionKeys());
keyObjectInspector = initEvaluatorsAndReturnStruct(keyEval,
distinctColIndices,
conf.getOutputKeyColumnNames(), numDistributionKeys, rowInspector);
diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
index c7e781d..42b546b 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/SelectOperator.java
@@ -32,8 +32,7 @@
/**
* Select operator implementation.
*/
-public class SelectOperator extends Operator<SelectDesc> implements
- Serializable {
+public class SelectOperator extends Operator<SelectDesc> implements Serializable {
private static final long serialVersionUID = 1L;
protected transient ExprNodeEvaluator[] eval;
@@ -60,10 +59,9 @@ protected void initializeOp(Configuration hconf) throws HiveException {
}
}
output = new Object[eval.length];
- LOG.info("SELECT "
- + ((StructObjectInspector) inputObjInspectors[0]).getTypeName());
- outputObjInspector = initEvaluatorsAndReturnStruct(eval, conf
- .getOutputColumnNames(), inputObjInspectors[0]);
+ LOG.info("SELECT " + ((StructObjectInspector) inputObjInspectors[0]).getTypeName());
+ outputObjInspector = initEvaluatorsAndReturnStruct(eval, conf.getOutputColumnNames(),
+ inputObjInspectors[0]);
initializeChildren(hconf);
}
@@ -81,8 +79,7 @@ public void processOp(Object row, int tag) throws HiveException {
} catch (HiveException e) {
throw e;
} catch (RuntimeException e) {
- throw new HiveException("Error evaluating "
- + conf.getColList().get(i).getExprString(), e);
+ throw new HiveException("Error evaluating " + conf.getColList().get(i).getExprString(), e);
}
forward(output, outputObjInspector);
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
index cb9e2f8..f951e1c 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
@@ -30,6 +30,7 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.AbstractMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.ColumnInfo;
import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
@@ -87,6 +88,7 @@ private ColumnPrunerProcFactory() {
* Node Processor for Column Pruning on Filter Operators.
*/
public static class ColumnPrunerFilterProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
FilterOperator op = (FilterOperator) nd;
@@ -120,6 +122,7 @@ public static ColumnPrunerFilterProc getFilterProc() {
* Node Processor for Column Pruning on Group By Operators.
*/
public static class ColumnPrunerGroupByProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
GroupByOperator op = (GroupByOperator) nd;
@@ -154,6 +157,7 @@ public static ColumnPrunerGroupByProc getGroupByProc() {
}
public static class ColumnPrunerScriptProc implements NodeProcessor {
+ @Override
@SuppressWarnings("unchecked")
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
@@ -224,6 +228,7 @@ public static ColumnPrunerScriptProc getScriptProc() {
* and update the RR & signature on the PTFOp.
*/
public static class ColumnPrunerPTFProc extends ColumnPrunerScriptProc {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
@@ -327,6 +332,7 @@ public static ColumnPrunerPTFProc getPTFProc() {
* The Default Node Processor for Column Pruning.
*/
public static class ColumnPrunerDefaultProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
ColumnPrunerProcCtx cppCtx = (ColumnPrunerProcCtx) ctx;
@@ -351,6 +357,7 @@ public static ColumnPrunerDefaultProc getDefaultProc() {
* store needed columns in tableScanDesc.
*/
public static class ColumnPrunerTableScanProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
TableScanOperator scanOp = (TableScanOperator) nd;
@@ -426,6 +433,7 @@ public static ColumnPrunerTableScanProc getTableScanProc() {
* The Node Processor for Column Pruning on Reduce Sink Operators.
*/
public static class ColumnPrunerReduceSinkProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
ReduceSinkOperator op = (ReduceSinkOperator) nd;
@@ -435,6 +443,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
List<String> colLists = new ArrayList<String>();
ArrayList<ExprNodeDesc> keys = conf.getKeyCols();
+ LOG.debug("Reduce Sink Operator " + op.getIdentifier() + " key:" + keys);
for (ExprNodeDesc key : keys) {
colLists = Utilities.mergeUniqElems(colLists, key.getCols());
}
@@ -456,7 +465,6 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
if (childCols != null) {
boolean[] flags = new boolean[valCols.size()];
- Map<String, ExprNodeDesc> exprMap = op.getColumnExprMap();
for (String childCol : childCols) {
int index = valColNames.indexOf(Utilities.removeValueTag(childCol));
@@ -497,6 +505,7 @@ public static ColumnPrunerReduceSinkProc getReduceSinkProc() {
* The Node Processor for Column Pruning on Lateral View Join Operators.
*/
public static class ColumnPrunerLateralViewJoinProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
LateralViewJoinOperator op = (LateralViewJoinOperator) nd;
@@ -585,6 +594,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx,
* The Node Processor for Column Pruning on Select Operators.
*/
public static class ColumnPrunerSelectProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
SelectOperator op = (SelectOperator) nd;
@@ -748,6 +758,12 @@ private static void pruneReduceSinkOperator(boolean[] retainFlags,
nm = oldRR.reverseLookup(outputCol);
}
+ // If multiple output columns reference the same column name, don't resolve it
+ // again here: its ColumnInfo has already been removed from the row resolver.
+ if (nm == null) {
+ continue;
+ }
+
// Only remove information of a column if it is not a key,
// i.e. this column is not appearing in keyExprs of the RS
if (ExprNodeDescUtils.indexOf(outputColExpr, keyExprs) == -1) {
@@ -795,6 +811,7 @@ public static ColumnPrunerLateralViewForwardProc getLateralViewForwardProc() {
* The Node Processor for Column Pruning on Join Operators.
*/
public static class ColumnPrunerJoinProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
JoinOperator op = (JoinOperator) nd;
@@ -817,9 +834,10 @@ public static ColumnPrunerJoinProc getJoinProc() {
* The Node Processor for Column Pruning on Map Join Operators.
*/
public static class ColumnPrunerMapJoinProc implements NodeProcessor {
+ @Override
public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
Object... nodeOutputs) throws SemanticException {
- MapJoinOperator op = (MapJoinOperator) nd;
+ AbstractMapJoinOperator<MapJoinDesc> op = (AbstractMapJoinOperator<MapJoinDesc>) nd;
pruneJoinOperator(ctx, op, op.getConf(), op.getColumnExprMap(), op
.getConf().getRetainList(), true);
return null;
@@ -878,6 +896,7 @@ private static void pruneJoinOperator(NodeProcessorCtx ctx,
List<Operator<? extends OperatorDesc>> childOperators = op
.getChildOperators();
+ LOG.info("JOIN " + op.getIdentifier() + " oldExprs: " + conf.getExprs());
List<String> childColLists = cppCtx.genColLists(op);
if (childColLists == null) {
return;
@@ -985,6 +1004,7 @@ private static void pruneJoinOperator(NodeProcessorCtx ctx,
rs.add(col);
}
+ LOG.info("JOIN " + op.getIdentifier() + " newExprs: " + conf.getExprs());
op.setColumnExprMap(newColExprMap);
conf.setOutputColumnNames(outputCols);
op.getSchema().setSignature(rs);
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java
new file mode 100644
index 0000000..b12d3a8
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagate.java
@@ -0,0 +1,171 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.GroupByOperator;
+import org.apache.hadoop.hive.ql.exec.JoinOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.ScriptOperator;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
+import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher;
+import org.apache.hadoop.hive.ql.lib.Dispatcher;
+import org.apache.hadoop.hive.ql.lib.GraphWalker;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.NodeProcessor;
+import org.apache.hadoop.hive.ql.lib.Rule;
+import org.apache.hadoop.hive.ql.lib.RuleRegExp;
+import org.apache.hadoop.hive.ql.parse.OpParseContext;
+import org.apache.hadoop.hive.ql.parse.ParseContext;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+
+/**
+ * Implementation of one of the rule-based optimization steps. ConstantPropagate traverses the DAG
+ * from root to children. Each conditional expression is processed as follows:
+ *
+ * 1. Fold constant expressions: if the expression is a UDF and all of its parameters are constants.
+ *
+ * 2. Shortcut expressions: if the expression is a logical operator and can be short-circuited by
+ * the constant values of some of its parameters.
+ *
+ * 3. Propagate expressions: if the expression is an assignment like column=constant, the expression
+ * is propagated to other operators to see whether further folding is possible.
+ */
+public class ConstantPropagate implements Transform {
+
+ private static final Log LOG = LogFactory.getLog(ConstantPropagate.class);
+ protected ParseContext pGraphContext;
+ private Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtxMap;
+
+ public ConstantPropagate() {}
+
+ /**
+ * Transform the query tree.
+ *
+ * @param pactx
+ * the current parse context
+ */
+ @Override
+ public ParseContext transform(ParseContext pactx) throws SemanticException {
+ if (pactx.getConf().getBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED)) {
+ // Constant propagation currently conflicts with the vectorizer, so disable constant
+ // propagation if the latter is enabled.
+ return pactx;
+ }
+
+ pGraphContext = pactx;
+ opToParseCtxMap = pGraphContext.getOpParseCtx();
+
+ // create the processor context that tracks propagated constants for each operator
+ ConstantPropagateProcCtx cppCtx = new ConstantPropagateProcCtx(opToParseCtxMap);
+
+ // create a walker which walks the tree in a DFS manner while maintaining
+ // the operator stack. The dispatcher
+ // generates the plan from the operator tree
+ Map<Rule, NodeProcessor> opRules = new LinkedHashMap<Rule, NodeProcessor>();
+
+ opRules.put(new RuleRegExp("R1", FilterOperator.getOperatorName() + "%"),
+ ConstantPropagateProcFactory.getFilterProc());
+ opRules.put(new RuleRegExp("R2", GroupByOperator.getOperatorName() + "%"),
+ ConstantPropagateProcFactory.getGroupByProc());
+ opRules.put(new RuleRegExp("R3", SelectOperator.getOperatorName() + "%"),
+ ConstantPropagateProcFactory.getSelectProc());
+ opRules.put(new RuleRegExp("R4", FileSinkOperator.getOperatorName() + "%"),
+ ConstantPropagateProcFactory.getFileSinkProc());
+ opRules.put(new RuleRegExp("R5", ReduceSinkOperator.getOperatorName() + "%"),
+ ConstantPropagateProcFactory.getReduceSinkProc());
+ opRules.put(new RuleRegExp("R6", JoinOperator.getOperatorName() + "%"),
+ ConstantPropagateProcFactory.getJoinProc());
+ opRules.put(new RuleRegExp("R7", TableScanOperator.getOperatorName() + "%"),
+ ConstantPropagateProcFactory.getTableScanProc());
+ opRules.put(new RuleRegExp("R8", ScriptOperator.getOperatorName() + "%"),
+ ConstantPropagateProcFactory.getStopProc());
+
+ // The dispatcher fires the processor corresponding to the closest matching
+ // rule and passes the context along
+ Dispatcher disp = new DefaultRuleDispatcher(ConstantPropagateProcFactory
+ .getDefaultProc(), opRules, cppCtx);
+ GraphWalker ogw = new ConstantPropagateWalker(disp);
+
+ // Create a list of operator nodes to start the walking.
+ ArrayList<Node> topNodes = new ArrayList<Node>();
+ topNodes.addAll(pGraphContext.getTopOps().values());
+ ogw.startWalking(topNodes, null);
+ for (Operator<? extends Serializable> opToDelete : cppCtx.getOpToDelete()) {
+ if (opToDelete.getParentOperators() == null || opToDelete.getParentOperators().size() != 1) {
+ throw new RuntimeException("Error pruning operator " + opToDelete
+ + ". It should have only 1 parent.");
+ }
+ opToDelete.getParentOperators().get(0).removeChildAndAdoptItsChildren(opToDelete);
+ }
+ return pGraphContext;
+ }
+
+
+ /**
+ * Walks the op tree in root first order.
+ */
+ public static class ConstantPropagateWalker extends DefaultGraphWalker {
+
+ public ConstantPropagateWalker(Dispatcher disp) {
+ super(disp);
+ }
+
+ @Override
+ public void walk(Node nd) throws SemanticException {
+
+ List<Node> parents = ((Operator) nd).getParentOperators();
+ if ((parents == null)
+ || getDispatchedList().containsAll(parents)) {
+ opStack.push(nd);
+
+ // all children are done or no need to walk the children
+ dispatch(nd, opStack);
+ opStack.pop();
+ } else {
+ getToWalk().removeAll(parents);
+ getToWalk().add(0, nd);
+ getToWalk().addAll(0, parents);
+ return;
+ }
+
+ // move all the children to the front of queue
+ List<? extends Node> children = nd.getChildren();
+ if (children != null) {
+ getToWalk().removeAll(children);
+ getToWalk().addAll(children);
+ }
+ }
+ }
+
+}
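Not shown in this excerpt is where the new Transform gets registered. A sketch of the kind of guard one would expect in Optimizer.initialize(), assuming the usual transformations list; the exact placement in Optimizer.java is an assumption, not part of this diff:

// Hypothetical excerpt of Optimizer.initialize(HiveConf hiveConf):
if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCONSTANTPROPAGATION)) {
  transformations.add(new ConstantPropagate());
}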
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java
new file mode 100644
index 0000000..91af3aa
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcCtx.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer;
+
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.UnionOperator;
+import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.parse.OpParseContext;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.OperatorDesc;
+
+/**
+ * This class implements the processor context for Constant Propagate.
+ *
+ * ConstantPropagateProcCtx keeps track of propagated constants in a column->const map for each
+ * operator, enabling constants to be resolved across operators.
+ */
+public class ConstantPropagateProcCtx implements NodeProcessorCtx {
+
+ private static final org.apache.commons.logging.Log LOG = LogFactory
+ .getLog(ConstantPropagateProcCtx.class);
+
+ private final Map<Operator<?>, Map<ColumnInfo, ExprNodeDesc>> opToConstantExprs;
+ private final Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtx;
+ private final List<Operator<? extends Serializable>> opToDelete;
+
+ public ConstantPropagateProcCtx(Map<Operator<? extends OperatorDesc>, OpParseContext> opToParseCtx) {
+ opToConstantExprs =
+ new HashMap<Operator<?>, Map<ColumnInfo, ExprNodeDesc>>();
+ opToDelete = new ArrayList<Operator<? extends Serializable>>();
+ this.opToParseCtx = opToParseCtx;
+ }
+
+ public Map<Operator<?>, Map<ColumnInfo, ExprNodeDesc>> getOpToConstantExprs() {
+ return opToConstantExprs;
+ }
+
+
+ public Map<Operator<? extends OperatorDesc>, OpParseContext> getOpToParseCtxMap() {
+ return opToParseCtx;
+ }
+
+ /**
+ * Resolve a ColumnInfo based on given RowResolver.
+ *
+ * @param ci
+ * @param rr
+ * @param parentRR
+ * @return
+ * @throws SemanticException
+ */
+ private ColumnInfo resolve(ColumnInfo ci, RowResolver rr, RowResolver parentRR)
+ throws SemanticException {
+ // Resolve a new ColumnInfo against the given RowResolver.
+ String alias = ci.getAlias();
+ if (alias == null) {
+ alias = ci.getInternalName();
+ }
+ String tblAlias = ci.getTabAlias();
+ ColumnInfo rci = rr.get(tblAlias, alias);
+ if (rci == null && rr.getRslvMap().size() == 1 && parentRR.getRslvMap().size() == 1) {
+ rci = rr.get(null, alias);
+ }
+ if (rci == null) {
+ return null;
+ }
+ String[] tmp = rr.reverseLookup(rci.getInternalName());
+ rci.setTabAlias(tmp[0]);
+ rci.setAlias(tmp[1]);
+ LOG.debug("Resolved "
+ + ci.getTabAlias() + "." + ci.getAlias() + " as "
+ + rci.getTabAlias() + "." + rci.getAlias() + " with rr: " + rr);
+ return rci;
+ }
+
+ /**
+ * Get propagated constant map from parents.
+ *
+ * Traverses all parents of the current operator; if a parent has a propagated constant (determined
+ * by an assignment expression like column=constant value), the column is resolved using the
+ * RowResolver and added to the current constant map.
+ *
+ * @param op
+ * operator getting the propagated constants.
+ * @return map of ColumnInfo to ExprNodeDesc. The values of that map must be either
+ * ExprNodeConstantDesc or ExprNodeNullDesc.
+ */
+ public Map<ColumnInfo, ExprNodeDesc> getPropagatedConstants(
+ Operator<? extends Serializable> op) {
+ Map<ColumnInfo, ExprNodeDesc> constants = new HashMap<ColumnInfo, ExprNodeDesc>();
+ OpParseContext parseCtx = opToParseCtx.get(op);
+ if (parseCtx == null) {
+ return constants;
+ }
+ RowResolver rr = parseCtx.getRowResolver();
+ LOG.debug("Getting constants of op:" + op + " with rr:" + rr);
+
+ try {
+ if (op.getParentOperators() == null) {
+ return constants;
+ }
+
+ if (op instanceof UnionOperator) {
+ String alias = (String) rr.getRslvMap().keySet().toArray()[0];
+ // find intersection
+ Map<ColumnInfo, ExprNodeDesc> intersection = null;
+ for (Operator<?> parent : op.getParentOperators()) {
+ Map<ColumnInfo, ExprNodeDesc> unionConst = opToConstantExprs.get(parent);
+ LOG.debug("Constant of op " + parent.getOperatorId() + " " + unionConst);
+ if (intersection == null) {
+ intersection = new HashMap<ColumnInfo, ExprNodeDesc>();
+ for (Entry<ColumnInfo, ExprNodeDesc> e : unionConst.entrySet()) {
+ ColumnInfo ci = new ColumnInfo(e.getKey());
+ ci.setTabAlias(alias);
+ intersection.put(ci, e.getValue());
+ }
+ } else {
+ Iterator<Entry<ColumnInfo, ExprNodeDesc>> itr = intersection.entrySet().iterator();
+ while (itr.hasNext()) {
+ Entry<ColumnInfo, ExprNodeDesc> e = itr.next();
+ boolean found = false;
+ for (Entry<ColumnInfo, ExprNodeDesc> f : opToConstantExprs.get(parent).entrySet()) {
+ if (e.getKey().getInternalName().equals(f.getKey().getInternalName())) {
+ if (e.getValue().isSame(f.getValue())) {
+ found = true;
+ }
+ break;
+ }
+ }
+ if (!found) {
+ itr.remove();
+ }
+ }
+ }
+ if (intersection.isEmpty()) {
+ return intersection;
+ }
+ }
+ LOG.debug("Propagated union constants:" + intersection);
+ return intersection;
+ }
+
+ for (Operator<? extends Serializable> parent : op.getParentOperators()) {
+ Map<ColumnInfo, ExprNodeDesc> c = opToConstantExprs.get(parent);
+ for (Entry<ColumnInfo, ExprNodeDesc> e : c.entrySet()) {
+ ColumnInfo ci = e.getKey();
+ ColumnInfo rci = null;
+ ExprNodeDesc constant = e.getValue();
+ rci = resolve(ci, rr, opToParseCtx.get(parent).getRowResolver());
+ if (rci != null) {
+ constants.put(rci, constant);
+ } else {
+ LOG.debug("Can't resolve " + ci.getTabAlias() + "." + ci.getAlias() + " from rr:"
+ + rr);
+ }
+
+ }
+
+ }
+ LOG.debug("Offering constants " + constants.keySet()
+ + " to operator " + op.toString());
+ return constants;
+ } catch (SemanticException e) {
+ LOG.error(e.getMessage(), e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ public RowResolver getRowResolver(Operator<? extends Serializable> op) {
+ OpParseContext parseCtx = opToParseCtx.get(op);
+ if (parseCtx == null) {
+ return null;
+ }
+ return parseCtx.getRowResolver();
+ }
+
+ public void addOpToDelete(Operator<? extends Serializable> op) {
+ opToDelete.add(op);
+ }
+
+ public List<Operator<? extends Serializable>> getOpToDelete() {
+ return opToDelete;
+ }
+}
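For the UnionOperator case above, only column constants on which every parent branch agrees survive. A simplified, self-contained sketch of that intersection over plain maps (illustration only, using plain String/Object maps instead of the Hive types):

import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

public class UnionConstantIntersection {
  // Keep only entries present with an equal value in every branch's constant map.
  static Map<String, Object> intersect(List<Map<String, Object>> branchConstants) {
    Map<String, Object> result = null;
    for (Map<String, Object> branch : branchConstants) {
      if (result == null) {
        result = new HashMap<String, Object>(branch);
      } else {
        Iterator<Map.Entry<String, Object>> it = result.entrySet().iterator();
        while (it.hasNext()) {
          Map.Entry<String, Object> e = it.next();
          Object other = branch.get(e.getKey());
          if (other == null || !other.equals(e.getValue())) {
            it.remove(); // branches disagree, or the column is not constant there: drop it
          }
        }
      }
    }
    return result == null ? new HashMap<String, Object>() : result;
  }
}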
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
new file mode 100644
index 0000000..c1cc9f4
--- /dev/null
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ConstantPropagateProcFactory.java
@@ -0,0 +1,944 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
+ * agreements. See the NOTICE file distributed with this work for additional information regarding
+ * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hive.ql.optimizer;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.Stack;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
+import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.GroupByOperator;
+import org.apache.hadoop.hive.ql.exec.JoinOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
+import org.apache.hadoop.hive.ql.exec.RowSchema;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.UDF;
+import org.apache.hadoop.hive.ql.lib.Node;
+import org.apache.hadoop.hive.ql.lib.NodeProcessor;
+import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.RowResolver;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx;
+import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeNullDesc;
+import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
+import org.apache.hadoop.hive.ql.plan.GroupByDesc;
+import org.apache.hadoop.hive.ql.plan.JoinCondDesc;
+import org.apache.hadoop.hive.ql.plan.JoinDesc;
+import org.apache.hadoop.hive.ql.plan.PlanUtils;
+import org.apache.hadoop.hive.ql.plan.ReduceSinkDesc;
+import org.apache.hadoop.hive.ql.plan.TableScanDesc;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPOr;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+import com.google.common.collect.ImmutableSet;
+
+/**
+ * Factory for generating the different node processors used by ConstantPropagate.
+ */
+public final class ConstantPropagateProcFactory {
+ protected static final Log LOG = LogFactory.getLog(ConstantPropagateProcFactory.class.getName());
+ protected static Set<Class<?>> propagatableUdfs = new HashSet<Class<?>>();
+
+ static {
+ propagatableUdfs.add(GenericUDFOPAnd.class);
+ };
+
+ private ConstantPropagateProcFactory() {
+ // prevent instantiation
+ }
+
+ /**
+ * Get ColumnInfo from column expression.
+ *
+ * @param rr
+ * @param desc
+ * @return
+ */
+ public static ColumnInfo resolveColumn(RowResolver rr,
+ ExprNodeColumnDesc desc) {
+ try {
+ ColumnInfo ci = rr.get(desc.getTabAlias(), desc.getColumn());
+ if (ci == null) {
+ String[] tmp = rr.reverseLookup(desc.getColumn());
+ if (tmp == null) {
+ return null;
+ }
+ ci = rr.get(tmp[0], tmp[1]);
+ ci.setTabAlias(tmp[0]);
+ ci.setAlias(tmp[1]);
+ } else {
+ String[] tmp = rr.reverseLookup(ci.getInternalName());
+ if (tmp == null) {
+ return null;
+ }
+ ci.setTabAlias(tmp[0]);
+ ci.setAlias(tmp[1]);
+ }
+ return ci;
+ } catch (SemanticException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static final Set<PrimitiveCategory> unSupportedTypes = ImmutableSet
+ .<PrimitiveCategory>builder()
+ .add(PrimitiveCategory.DECIMAL)
+ .add(PrimitiveCategory.VARCHAR)
+ .add(PrimitiveCategory.CHAR).build();
+
+ /**
+ * Cast type from expression type to expected type ti.
+ *
+ * @param desc constant expression
+ * @param ti expected type info
+ * @return cast constant, or null if the type cast failed.
+ */
+ private static ExprNodeConstantDesc typeCast(ExprNodeDesc desc, TypeInfo ti) {
+ if (desc instanceof ExprNodeNullDesc) {
+ return null;
+ }
+ if (!(ti instanceof PrimitiveTypeInfo) || !(desc.getTypeInfo() instanceof PrimitiveTypeInfo)) {
+ return null;
+ }
+
+ PrimitiveTypeInfo priti = (PrimitiveTypeInfo) ti;
+ PrimitiveTypeInfo descti = (PrimitiveTypeInfo) desc.getTypeInfo();
+
+ if (unSupportedTypes.contains(priti.getPrimitiveCategory())
+ || unSupportedTypes.contains(descti.getPrimitiveCategory())) {
+ // FIXME: support template types. It currently has conflict with
+ // ExprNodeConstantDesc
+ return null;
+ }
+ LOG.debug("Casting " + desc + " to type " + ti);
+ ExprNodeConstantDesc c = (ExprNodeConstantDesc) desc;
+ ObjectInspector origOI =
+ TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(desc.getTypeInfo());
+ ObjectInspector oi =
+ TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(ti);
+ Converter converter = ObjectInspectorConverters.getConverter(origOI, oi);
+ Object convObj = converter.convert(c.getValue());
+
+ // Convert integer related types because converters are not sufficient
+ if (convObj instanceof Integer) {
+ switch (priti.getPrimitiveCategory()) {
+ case BYTE:
+ convObj = new Byte((byte) (((Integer) convObj).intValue()));
+ break;
+ case SHORT:
+ convObj = new Short((short) ((Integer) convObj).intValue());
+ break;
+ case LONG:
+ convObj = new Long(((Integer) convObj).intValue());
+ default:
+ }
+ }
+ return new ExprNodeConstantDesc(ti, convObj);
+ }
+
+ /**
+ * Fold input expression desc.
+ *
+ * If desc is a UDF and all of its parameters are constants, evaluate it. If desc is a column
+ * expression, look it up in the propagated constants and, if found, replace it with the constant.
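+ * For example, example_add(1, 2) has only constant children and folds to the constant 3,
+ * as seen in the contrib test output above.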
+ *
+ * @param desc folding expression
+ * @param constants current propagated constant map
+ * @param cppCtx
+ * @param op processing operator
+ * @param propagate if true, assignment expressions will be added to constants.
+ * @return fold expression
+ */
+ private static ExprNodeDesc foldExpr(ExprNodeDesc desc, Map<ColumnInfo, ExprNodeDesc> constants,
+ ConstantPropagateProcCtx cppCtx, Operator<? extends Serializable> op, int tag,
+ boolean propagate) {
+ if (desc instanceof ExprNodeGenericFuncDesc) {
+ ExprNodeGenericFuncDesc funcDesc = (ExprNodeGenericFuncDesc) desc;
+
+ // The function must be deterministic, or we can't fold it.
+ GenericUDF udf = funcDesc.getGenericUDF();
+ if (!isDeterministicUdf(udf)) {
+ LOG.debug("Function " + udf.getClass() + " is non-deterministic, skip folding.");
+ return desc;
+ }
+
+ boolean propagateNext = propagate && propagatableUdfs.contains(udf.getClass());
+ List<ExprNodeDesc> newExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc childExpr : desc.getChildren()) {
+ newExprs.add(foldExpr(childExpr, constants, cppCtx, op, tag, propagateNext));
+ }
+
+ // If all child expressions are constants, evaluate UDF immediately
+ ExprNodeDesc constant = evaluateFunction(udf, newExprs, desc.getChildren());
+ if (constant != null) {
+ LOG.debug("Folding expression:" + desc + " -> " + constant);
+ return constant;
+ } else {
+
+ // Check if the function can be short cut.
+ ExprNodeDesc shortcut = shortcutFunction(udf, newExprs);
+ if (shortcut != null) {
+ LOG.debug("Folding expression:" + desc + " -> " + shortcut);
+ return shortcut;
+ }
+ ((ExprNodeGenericFuncDesc) desc).setChildren(newExprs);
+ }
+
+ // For selected binary operators (=, is null, etc.), if one of the operands is a
+ // constant, add the column to the constants map as a half-deterministic column.
+ if (propagate) {
+ propagate(udf, newExprs, cppCtx.getRowResolver(op), constants);
+ }
+
+ return desc;
+ } else if (desc instanceof ExprNodeColumnDesc) {
+ if (op.getParentOperators() == null || op.getParentOperators().isEmpty()) {
+ return desc;
+ }
+ Operator<? extends Serializable> parent = op.getParentOperators().get(tag);
+ ExprNodeDesc col = evaluateColumn((ExprNodeColumnDesc) desc, cppCtx, parent);
+ if (col != null) {
+ LOG.debug("Folding expression:" + desc + " -> " + col);
+ return col;
+ }
+ }
+ return desc;
+ }
+
+ private static boolean isDeterministicUdf(GenericUDF udf) {
+ UDFType udfType = udf.getClass().getAnnotation(UDFType.class);
+ if (udf instanceof GenericUDFBridge) {
+ udfType = ((GenericUDFBridge) udf).getUdfClass().getAnnotation(UDFType.class);
+ }
+ if (udfType.deterministic() == false) {
+ return false;
+ }
+
+ // If udf is requiring additional jars, we can't determine the result in
+ // compile time.
+ String[] files;
+ String[] jars;
+ if (udf instanceof GenericUDFBridge) {
+ GenericUDFBridge bridge = (GenericUDFBridge) udf;
+ String udfClassName = bridge.getUdfClassName();
+ try {
+ UDF udfInternal =
+ (UDF) Class.forName(bridge.getUdfClassName(), true, JavaUtils.getClassLoader())
+ .newInstance();
+ files = udfInternal.getRequiredFiles();
+ jars = udf.getRequiredJars();
+ } catch (Exception e) {
+ LOG.error("The UDF implementation class '" + udfClassName
+ + "' is not present in the class path");
+ return false;
+ }
+ } else {
+ files = udf.getRequiredFiles();
+ jars = udf.getRequiredJars();
+ }
+ if (files != null || jars != null) {
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Propagate assignment expression, adding an entry into constant map constants.
+ *
+ * @param udf expression UDF, currently only 2 UDFs are supported: '=' and 'is null'.
+ * @param newExprs child expressions (parameters).
+ * @param cppCtx
+ * @param op
+ * @param constants
+ */
+ private static void propagate(GenericUDF udf, List<ExprNodeDesc> newExprs, RowResolver rr,
+ Map<ColumnInfo, ExprNodeDesc> constants) {
+ if (udf instanceof GenericUDFOPEqual) {
+ ExprNodeDesc lOperand = newExprs.get(0);
+ ExprNodeDesc rOperand = newExprs.get(1);
+ ExprNodeColumnDesc c;
+ ExprNodeConstantDesc v;
+ if (lOperand instanceof ExprNodeColumnDesc && rOperand instanceof ExprNodeConstantDesc) {
+ c = (ExprNodeColumnDesc) lOperand;
+ v = (ExprNodeConstantDesc) rOperand;
+ } else if (rOperand instanceof ExprNodeColumnDesc && lOperand instanceof ExprNodeConstantDesc) {
+ c = (ExprNodeColumnDesc) rOperand;
+ v = (ExprNodeConstantDesc) lOperand;
+ } else {
+ return;
+ }
+ ColumnInfo ci = resolveColumn(rr, c);
+ if (ci != null) {
+ LOG.debug("Filter " + udf + " is identified as a value assignment, propagate it.");
+ if (!v.getTypeInfo().equals(ci.getType())) {
+ v = typeCast(v, ci.getType());
+ }
+ if (v != null) {
+ constants.put(ci, v);
+ }
+ }
+ } else if (udf instanceof GenericUDFOPNull) {
+ ExprNodeDesc operand = newExprs.get(0);
+ if (operand instanceof ExprNodeColumnDesc) {
+ LOG.debug("Filter " + udf + " is identified as a value assignment, propagate it.");
+ ExprNodeColumnDesc c = (ExprNodeColumnDesc) operand;
+ ColumnInfo ci = resolveColumn(rr, c);
+ if (ci != null) {
+ constants.put(ci, new ExprNodeNullDesc());
+ }
+ }
+ }
+ }
+
+ private static ExprNodeDesc shortcutFunction(GenericUDF udf, List<ExprNodeDesc> newExprs) {
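+ // Shortcut examples: (true and X) -> X, (false and X) -> false,
+ // (true or X) -> true, (false or X) -> X.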
+ if (udf instanceof GenericUDFOPAnd) {
+ for (int i = 0; i < 2; i++) {
+ ExprNodeDesc childExpr = newExprs.get(i);
+ if (childExpr instanceof ExprNodeConstantDesc) {
+ ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr;
+ if (c.getValue() == Boolean.TRUE) {
+
+ // if true, prune it
+ return newExprs.get(Math.abs(i - 1));
+ } else {
+
+ // if false return false
+ return childExpr;
+ }
+ }
+ }
+ }
+
+ if (udf instanceof GenericUDFOPOr) {
+ for (int i = 0; i < 2; i++) {
+ ExprNodeDesc childExpr = newExprs.get(i);
+ if (childExpr instanceof ExprNodeConstantDesc) {
+ ExprNodeConstantDesc c = (ExprNodeConstantDesc) childExpr;
+ if (c.getValue() == Boolean.FALSE) {
+
+ // if false, prune it
+ return newExprs.get(Math.abs(i - 1));
+ } else {
+
+ // if true return true
+ return childExpr;
+ }
+ }
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Evaluate column, replace the deterministic columns with constants if possible
+ *
+ * @param desc
+ * @param ctx
+ * @param op
+ * @param colToConstants
+ * @return
+ */
+ private static ExprNodeDesc evaluateColumn(ExprNodeColumnDesc desc,
+ ConstantPropagateProcCtx cppCtx, Operator<? extends Serializable> parent) {
+ try {
+ ColumnInfo ci = null;
+ RowResolver rr = cppCtx.getOpToParseCtxMap().get(parent).getRowResolver();
+ String[] tmp = rr.reverseLookup(desc.getColumn());
+ if (tmp == null) {
+ LOG.error("Reverse look up of column " + desc + " error!");
+ return null;
+ }
+ ci = rr.get(tmp[0], tmp[1]);
+ if (ci != null) {
+ ExprNodeDesc constant = null;
+ // Additional work for union operator, see union27.q
+ if (ci.getAlias() == null) {
+ for (Entry<ColumnInfo, ExprNodeDesc> e : cppCtx.getOpToConstantExprs().get(parent).entrySet()) {
+ if (e.getKey().getInternalName().equals(ci.getInternalName())) {
+ constant = e.getValue();
+ break;
+ }
+ }
+ } else {
+ constant = cppCtx.getOpToConstantExprs().get(parent).get(ci);
+ }
+ if (constant != null) {
+ if (constant instanceof ExprNodeConstantDesc
+ && !constant.getTypeInfo().equals(desc.getTypeInfo())) {
+ return typeCast(constant, desc.getTypeInfo());
+ }
+ return constant;
+ } else {
+ return null;
+ }
+ }
+ LOG.error("Can't resolve " + desc.getTabAlias() + "." + desc.getColumn());
+ throw new RuntimeException("Can't resolve " + desc.getTabAlias() + "." + desc.getColumn());
+ } catch (SemanticException e) {
+ throw new RuntimeException(e);
+ }
+
+ }
+
+ /**
+ * Evaluate UDF
+ *
+ * @param udf UDF object
+ * @param exprs
+ * @param oldExprs
+ * @return null if expression cannot be evaluated (not all parameters are constants). Or evaluated
+ * ExprNodeConstantDesc if possible.
+ * @throws HiveException
+ */
+ private static ExprNodeDesc evaluateFunction(GenericUDF udf, List<ExprNodeDesc> exprs,
+ List<ExprNodeDesc> oldExprs) {
+ DeferredJavaObject[] arguments = new DeferredJavaObject[exprs.size()];
+ ObjectInspector[] argois = new ObjectInspector[exprs.size()];
+ for (int i = 0; i < exprs.size(); i++) {
+ ExprNodeDesc desc = exprs.get(i);
+ if (desc instanceof ExprNodeConstantDesc) {
+ ExprNodeConstantDesc constant = (ExprNodeConstantDesc) exprs.get(i);
+ if (!constant.getTypeInfo().equals(oldExprs.get(i).getTypeInfo())) {
+ constant = typeCast(constant, oldExprs.get(i).getTypeInfo());
+ if (constant == null) {
+ return null;
+ }
+ }
+ Object value = constant.getValue();
+ PrimitiveTypeInfo pti = (PrimitiveTypeInfo) constant.getTypeInfo();
+ Object writableValue =
+ PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti)
+ .getPrimitiveWritableObject(value);
+ arguments[i] = new DeferredJavaObject(writableValue);
+ argois[i] =
+ ObjectInspectorUtils.getConstantObjectInspector(constant.getWritableObjectInspector(),
+ writableValue);
+ } else if (desc instanceof ExprNodeNullDesc) {
+
+ // FIXME: add null support.
+ return null;
+ } else {
+ return null;
+ }
+ }
+
+ try {
+ ObjectInspector oi = udf.initialize(argois);
+ Object o = udf.evaluate(arguments);
+ LOG.debug(udf.getClass().getName() + "(" + exprs + ")=" + o);
+ if (o == null) {
+ return new ExprNodeNullDesc();
+ }
+ Class<?> clz = o.getClass();
+ if (PrimitiveObjectInspectorUtils.isPrimitiveWritableClass(clz)) {
+ PrimitiveObjectInspector poi = (PrimitiveObjectInspector) oi;
+ TypeInfo typeInfo = poi.getTypeInfo();
+
+ // Handling parameterized types (varchar, decimal, etc).
+ if (typeInfo.getTypeName().contains(serdeConstants.DECIMAL_TYPE_NAME)
+ || typeInfo.getTypeName().contains(serdeConstants.VARCHAR_TYPE_NAME)
+ || typeInfo.getTypeName().contains(serdeConstants.CHAR_TYPE_NAME)) {
+
+ // Do not support parameterized types.
+ return null;
+ }
+ o = poi.getPrimitiveJavaObject(o);
+ } else if (PrimitiveObjectInspectorUtils.isPrimitiveJavaClass(clz)) {
+
+ } else {
+ LOG.error("Unable to evaluate " + udf + ". Return value unrecognizable.");
+ return null;
+ }
+ return new ExprNodeConstantDesc(o);
+ } catch (HiveException e) {
+ LOG.error("Evaluating function " + udf.getClass()
+ + " failed in Constant Propagation Optimizer.");
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Change the operator's row schema, replacing a column's object inspector when the column has been folded to a constant.
+ *
+ * @param op
+ * @param constants
+ * @throws SemanticException
+ */
+ private static void foldOperator(Operator<? extends Serializable> op,
+ ConstantPropagateProcCtx cppCtx) throws SemanticException {
+ RowSchema schema = op.getSchema();
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getOpToConstantExprs().get(op);
+ if (schema != null && schema.getSignature() != null) {
+ for (ColumnInfo col : schema.getSignature()) {
+ ExprNodeDesc constant = constants.get(col);
+ if (constant != null) {
+ LOG.debug("Replacing column " + col + " with constant " + constant + " in " + op);
+ if (!col.getType().equals(constant.getTypeInfo())) {
+ constant = typeCast(constant, col.getType());
+ }
+ if (constant != null) {
+ col.setObjectinspector(constant.getWritableObjectInspector());
+ }
+ }
+ }
+ }
+
+ Map<String, ExprNodeDesc> colExprMap = op.getColumnExprMap();
+ if (colExprMap != null) {
+ for (Entry<ColumnInfo, ExprNodeDesc> e : constants.entrySet()) {
+ String internalName = e.getKey().getInternalName();
+ if (colExprMap.containsKey(internalName)) {
+ colExprMap.put(internalName, e.getValue());
+ }
+ }
+ }
+ }
+
+ /**
+ * Node Processor for Constant Propagation on Filter Operators. The processor is to fold
+ * conditional expressions and extract assignment expressions and propagate them.
+ */
+ public static class ConstantPropagateFilterProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ FilterOperator op = (FilterOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+
+ ExprNodeDesc condn = op.getConf().getPredicate();
+ LOG.debug("Old filter FIL[" + op.getIdentifier() + "] conditions:" + condn.getExprString());
+ ExprNodeDesc newCondn = foldExpr(condn, constants, cppCtx, op, 0, true);
+ if (newCondn instanceof ExprNodeConstantDesc) {
+ ExprNodeConstantDesc c = (ExprNodeConstantDesc) newCondn;
+ if (c.getValue() == Boolean.TRUE) {
+ cppCtx.addOpToDelete(op);
+ LOG.debug("Filter expression " + condn + " holds true. Will delete it.");
+ } else if (c.getValue() == Boolean.FALSE) {
+ LOG.warn("Filter expression " + condn + " holds false!");
+ }
+ }
+ LOG.debug("New filter FIL[" + op.getIdentifier() + "] conditions:" + newCondn.getExprString());
+
+ // merge it with the downstream col list
+ op.getConf().setPredicate(newCondn);
+ foldOperator(op, cppCtx);
+ return null;
+ }
+
+ }
+
+ /**
+ * Factory method to get the ConstantPropagateFilterProc class.
+ *
+ * @return ConstantPropagateFilterProc
+ */
+ public static ConstantPropagateFilterProc getFilterProc() {
+ return new ConstantPropagateFilterProc();
+ }
+
+ /**
+ * Node Processor for Constant Propagate for Group By Operators.
+ */
+ public static class ConstantPropagateGroupByProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ GroupByOperator op = (GroupByOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> colToConstants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, colToConstants);
+
+ if (colToConstants.isEmpty()) {
+ return null;
+ }
+
+ GroupByDesc conf = op.getConf();
+ ArrayList<ExprNodeDesc> keys = conf.getKeys();
+ for (int i = 0; i < keys.size(); i++) {
+ ExprNodeDesc key = keys.get(i);
+ ExprNodeDesc newkey = foldExpr(key, colToConstants, cppCtx, op, 0, false);
+ keys.set(i, newkey);
+ }
+ foldOperator(op, cppCtx);
+ return null;
+ }
+ }
+
+ /**
+ * Factory method to get the ConstantPropagateGroupByProc class.
+ *
+ * @return ConstantPropagateGroupByProc
+ */
+ public static ConstantPropagateGroupByProc getGroupByProc() {
+ return new ConstantPropagateGroupByProc();
+ }
+
+ /**
+ * The Default Node Processor for Constant Propagation.
+ */
+ public static class ConstantPropagateDefaultProc implements NodeProcessor {
+ @SuppressWarnings("unchecked")
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ if (constants.isEmpty()) {
+ return null;
+ }
+ foldOperator(op, cppCtx);
+ return null;
+ }
+ }
+
+ /**
+ * Factory method to get the ConstantPropagateDefaultProc class.
+ *
+ * @return ConstantPropagateDefaultProc
+ */
+ public static ConstantPropagateDefaultProc getDefaultProc() {
+ return new ConstantPropagateDefaultProc();
+ }
+
+ /**
+ * The Node Processor for Constant Propagation for Select Operators.
+ */
+ public static class ConstantPropagateSelectProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ SelectOperator op = (SelectOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ foldOperator(op, cppCtx);
+ List<ExprNodeDesc> colList = op.getConf().getColList();
+ if (colList != null) {
+ for (int i = 0; i < colList.size(); i++) {
+ ExprNodeDesc newCol = foldExpr(colList.get(i), constants, cppCtx, op, 0, false);
+ colList.set(i, newCol);
+ }
+ LOG.debug("New column list:(" + StringUtils.join(colList, " ") + ")");
+ }
+ return null;
+ }
+ }
+
+ /**
+ * The Factory method to get the ConstantPropagateSelectProc class.
+ *
+ * @return ConstantPropagateSelectProc
+ */
+ public static ConstantPropagateSelectProc getSelectProc() {
+ return new ConstantPropagateSelectProc();
+ }
+
+ /**
+ * The Node Processor for constant propagation for FileSink Operators. In addition to constant
+ * propagation, this processor also prunes dynamic partitions to static partitions if possible.
+ */
+ public static class ConstantPropagateFileSinkProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ FileSinkOperator op = (FileSinkOperator) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ if (constants.isEmpty()) {
+ return null;
+ }
+ FileSinkDesc fsdesc = op.getConf();
+ DynamicPartitionCtx dpCtx = fsdesc.getDynPartCtx();
+ if (dpCtx != null) {
+
+ // If all dynamic partitions are propagated as constant, remove DP.
+ Set<String> inputs = dpCtx.getInputToDPCols().keySet();
+
+ // Assume only 1 parent for FS operator
+ Operator<? extends Serializable> parent = op.getParentOperators().get(0);
+ Map<ColumnInfo, ExprNodeDesc> parentConstants = cppCtx.getPropagatedConstants(parent);
+ RowResolver rr = cppCtx.getOpToParseCtxMap().get(parent).getRowResolver();
+ boolean allConstant = true;
+ for (String input : inputs) {
+ String tmp[] = rr.reverseLookup(input);
+ ColumnInfo ci = rr.get(tmp[0], tmp[1]);
+ if (parentConstants.get(ci) == null) {
+ allConstant = false;
+ break;
+ }
+ }
+ if (allConstant) {
+ pruneDP(fsdesc);
+ }
+ }
+ foldOperator(op, cppCtx);
+ return null;
+ }
+
+ private void pruneDP(FileSinkDesc fsdesc) {
+ // FIXME: Support pruning dynamic partitioning.
+ LOG.info("DP can be rewritten to SP!");
+ }
+ }
+
+ public static NodeProcessor getFileSinkProc() {
+ return new ConstantPropagateFileSinkProc();
+ }
+
+ /**
+ * The Node Processor for Constant Propagation for operators that are designed to stop propagation.
+ * Currently these kinds of operators include UnionOperator and ScriptOperator.
+ */
+ public static class ConstantPropagateStopProc implements NodeProcessor {
+ public Object process(Node nd, Stack stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ Operator<? extends Serializable> op = (Operator<? extends Serializable>) nd;
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ cppCtx.getOpToConstantExprs().put(op, new HashMap<ColumnInfo, ExprNodeDesc>());
+ LOG.debug("Stop propagate constants on op " + op.getOperatorId());
+ return null;
+ }
+ }
+
+ public static NodeProcessor getStopProc() {
+ return new ConstantPropagateStopProc();
+ }
+
+ /**
+ * The Node Processor for Constant Propagation for ReduceSink Operators. If the RS Operator feeds
+ * a join, only constants from inner-join inputs, or from the side of an outer join whose rows are
+ * always preserved (the left table of a left outer join, and vice versa), can be propagated.
+ */
+ public static class ConstantPropagateReduceSinkProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ ReduceSinkOperator op = (ReduceSinkOperator) nd;
+ ReduceSinkDesc rsDesc = op.getConf();
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ if (constants.isEmpty()) {
+ return null;
+ }
+
+ if (op.getChildOperators().size() == 1
+ && op.getChildOperators().get(0) instanceof JoinOperator) {
+ JoinOperator joinOp = (JoinOperator) op.getChildOperators().get(0);
+ if (skipFolding(joinOp.getConf(), rsDesc.getTag())) {
+ LOG.debug("Skip folding in outer join " + op);
+ cppCtx.getOpToConstantExprs().put(op, new HashMap<ColumnInfo, ExprNodeDesc>());
+ return null;
+ }
+ }
+
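+ // Do not fold when the RS carries distinct column indices (i.e. for distinct subqueries).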
+ if (rsDesc.getDistinctColumnIndices() != null
+ && !rsDesc.getDistinctColumnIndices().isEmpty()) {
+ LOG.debug("Skip folding in distinct subqueries " + op);
+ cppCtx.getOpToConstantExprs().put(op, new HashMap<ColumnInfo, ExprNodeDesc>());
+ return null;
+ }
+
+ // key columns
+ ArrayList<ExprNodeDesc> newKeyExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc desc : rsDesc.getKeyCols()) {
+ newKeyExprs.add(foldExpr(desc, constants, cppCtx, op, 0, false));
+ }
+ rsDesc.setKeyCols(newKeyExprs);
+
+ // partition columns
+ ArrayList<ExprNodeDesc> newPartExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc desc : rsDesc.getPartitionCols()) {
+ ExprNodeDesc expr = foldExpr(desc, constants, cppCtx, op, 0, false);
+ if (expr instanceof ExprNodeConstantDesc || expr instanceof ExprNodeNullDesc) {
+ continue;
+ }
+ newPartExprs.add(expr);
+ }
+ rsDesc.setPartitionCols(newPartExprs);
+
+ // value columns
+ ArrayList<ExprNodeDesc> newValExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc desc : rsDesc.getValueCols()) {
+ newValExprs.add(foldExpr(desc, constants, cppCtx, op, 0, false));
+ }
+ rsDesc.setValueCols(newValExprs);
+ foldOperator(op, cppCtx);
+ return null;
+ }
+
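+ /**
+ * Whether constant folding should be skipped for the join input identified by the RS tag.
+ * Folding is only safe for inner-join inputs and for the preserved side of an outer join,
+ * whose values cannot be replaced by NULLs.
+ */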
+ private boolean skipFolding(JoinDesc joinDesc, int tag) {
+ JoinCondDesc[] conds = joinDesc.getConds();
+ for (int i = conds.length - 1; i >= 0; i--) {
+ if (conds[i].getType() == JoinDesc.INNER_JOIN) {
+ if (tag == i + 1) {
+ return false;
+ }
+ } else if (conds[i].getType() == JoinDesc.FULL_OUTER_JOIN) {
+ return true;
+ } else if (conds[i].getType() == JoinDesc.RIGHT_OUTER_JOIN) {
+ if (tag == i + 1) {
+ return false;
+ }
+ return true;
+ } else if (conds[i].getType() == JoinDesc.LEFT_OUTER_JOIN) {
+ if (tag == i + 1) {
+ return true;
+ }
+ }
+ }
+ if (tag == 0) {
+ return false;
+ }
+ return true;
+ }
+
+ }
+
+ public static NodeProcessor getReduceSinkProc() {
+ return new ConstantPropagateReduceSinkProc();
+ }
+
+ /**
+ * The Node Processor for Constant Propagation for Join Operators.
+ */
+ public static class ConstantPropagateJoinProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ JoinOperator op = (JoinOperator) nd;
+ JoinDesc conf = op.getConf();
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ if (constants.isEmpty()) {
+ return null;
+ }
+
+ // Note: the following code (removing folded constants from the exprs) is tightly coupled with
+ // the ColumnPruner optimizer. We assume ColumnPruner will remove the constant columns, so output
+ // columns are not handled here, except when the join operator is followed by a redistribution
+ // (an RS operator).
+ if (op.getChildOperators().size() == 1
+ && op.getChildOperators().get(0) instanceof ReduceSinkOperator) {
+ LOG.debug("Skip JOIN-RS structure.");
+ return null;
+ }
+ LOG.info("Old exprs " + conf.getExprs());
+ Iterator<Entry<Byte, List<ExprNodeDesc>>> itr = conf.getExprs().entrySet().iterator();
+ while (itr.hasNext()) {
+ Entry<Byte, List<ExprNodeDesc>> e = itr.next();
+ int tag = e.getKey();
+ List<ExprNodeDesc> exprs = e.getValue();
+ if (exprs == null) {
+ continue;
+ }
+ List<ExprNodeDesc> newExprs = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc expr : exprs) {
+ ExprNodeDesc newExpr = foldExpr(expr, constants, cppCtx, op, tag, false);
+ if (newExpr instanceof ExprNodeConstantDesc || newExpr instanceof ExprNodeNullDesc) {
+ LOG.info("expr " + newExpr + " fold from " + expr + " is removed.");
+ continue;
+ }
+ newExprs.add(newExpr);
+ }
+ e.setValue(newExprs);
+ }
+ LOG.info("New exprs " + conf.getExprs());
+
+ for (List<ExprNodeDesc> v : conf.getFilters().values()) {
+ for (int i = 0; i < v.size(); i++) {
+ ExprNodeDesc expr = foldExpr(v.get(i), constants, cppCtx, op, 0, false);
+ v.set(i, expr);
+ }
+ }
+ foldOperator(op, cppCtx);
+ return null;
+ }
+
+ }
+
+ public static NodeProcessor getJoinProc() {
+ return new ConstantPropagateJoinProc();
+ }
+
+ /**
+ * The Node Processor for Constant Propagation for Table Scan Operators.
+ */
+ public static class ConstantPropagateTableScanProc implements NodeProcessor {
+ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx, Object... nodeOutputs)
+ throws SemanticException {
+ TableScanOperator op = (TableScanOperator) nd;
+ TableScanDesc conf = op.getConf();
+ ConstantPropagateProcCtx cppCtx = (ConstantPropagateProcCtx) ctx;
+ Map<ColumnInfo, ExprNodeDesc> constants = cppCtx.getPropagatedConstants(op);
+ cppCtx.getOpToConstantExprs().put(op, constants);
+ ExprNodeGenericFuncDesc pred = conf.getFilterExpr();
+ if (pred == null) {
+ return null;
+ }
+
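+ // Fold each child of the pushed-down filter expression in place and re-attach the folded children.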
+ List<ExprNodeDesc> newChildren = new ArrayList<ExprNodeDesc>();
+ for (ExprNodeDesc expr : pred.getChildren()) {
+ ExprNodeDesc constant = foldExpr(expr, constants, cppCtx, op, 0, false);
+ newChildren.add(constant);
+ }
+ pred.setChildren(newChildren);
+ return null;
+ }
+ }
+
+ public static NodeProcessor getTableScanProc() {
+ return new ConstantPropagateTableScanProc();
+ }
+}
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
index a93e178..e4e1141 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/Optimizer.java
@@ -63,18 +63,11 @@ public void initialize(HiveConf hiveConf) {
transformations.add(new ListBucketingPruner());
}
}
- if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTGROUPBY) ||
- HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_MAP_GROUPBY_SORT)) {
+ if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTGROUPBY)
+ || HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVE_MAP_GROUPBY_SORT)) {
transformations.add(new GroupByOptimizer());
}
- transformations.add(new ColumnPruner());
- if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME)) {
- transformations.add(new SkewJoinOptimizer());
- }
- if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTGBYUSINGINDEX)) {
- transformations.add(new RewriteGBUsingIndex());
- }
- transformations.add(new SamplePruner());
transformations.add(new MapJoinProcessor());
boolean bucketMapJoinOptimizer = false;
if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTBUCKETMAPJOIN)) {
@@ -84,7 +77,8 @@ public void initialize(HiveConf hiveConf) {
// If optimize hive.optimize.bucketmapjoin.sortedmerge is set, add both
// BucketMapJoinOptimizer and SortedMergeBucketMapJoinOptimizer
- if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTSORTMERGEBUCKETMAPJOIN)) {
+ if (HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVEOPTSORTMERGEBUCKETMAPJOIN)) {
if (!bucketMapJoinOptimizer) {
// No need to add BucketMapJoinOptimizer twice
transformations.add(new BucketMapJoinOptimizer());
@@ -92,38 +86,59 @@ public void initialize(HiveConf hiveConf) {
transformations.add(new SortedMergeBucketMapJoinOptimizer());
}
- if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTIMIZEBUCKETINGSORTING)) {
+ if (HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVEOPTIMIZEBUCKETINGSORTING)) {
transformations.add(new BucketingSortingReduceSinkOptimizer());
}
-
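+ // Constant propagation runs before ColumnPruner so that columns folded into constants
+ // can subsequently be pruned.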
+ if (HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVEOPTCONSTANTPROPAGATION)) {
+ transformations.add(new ConstantPropagate());
+ }
+ transformations.add(new ColumnPruner());
+ if (HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME)) {
+ transformations.add(new SkewJoinOptimizer());
+ }
+ if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTGBYUSINGINDEX)) {
+ transformations.add(new RewriteGBUsingIndex());
+ }
+ transformations.add(new SamplePruner());
transformations.add(new UnionProcessor());
transformations.add(new JoinReorder());
- if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.DYNAMICPARTITIONING) &&
- HiveConf.getVar(hiveConf, HiveConf.ConfVars.DYNAMICPARTITIONINGMODE).equals("nonstrict") &&
- HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTSORTDYNAMICPARTITION) &&
- !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTLISTBUCKETING)) {
+ if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.DYNAMICPARTITIONING)
+ && HiveConf.getVar(hiveConf, HiveConf.ConfVars.DYNAMICPARTITIONINGMODE)
+ .equals("nonstrict")
+ && HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVEOPTSORTDYNAMICPARTITION)
+ && !HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVEOPTLISTBUCKETING)) {
transformations.add(new SortedDynPartitionOptimizer());
}
- if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTREDUCEDEDUPLICATION)) {
+ if (HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVEOPTREDUCEDEDUPLICATION)) {
transformations.add(new ReduceSinkDeDuplication());
}
transformations.add(new NonBlockingOpDeDupProc());
if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVELIMITOPTENABLE)) {
transformations.add(new GlobalLimitOptimizer());
}
- if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCORRELATION) &&
- !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEGROUPBYSKEW) &&
- !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME)) {
+ if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTCORRELATION)
+ && !HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEGROUPBYSKEW)
+ && !HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVE_OPTIMIZE_SKEWJOIN_COMPILETIME)) {
transformations.add(new CorrelationOptimizer());
}
- if (HiveConf.getFloatVar(hiveConf, HiveConf.ConfVars.HIVELIMITPUSHDOWNMEMORYUSAGE) > 0) {
+ if (HiveConf.getFloatVar(hiveConf,
+ HiveConf.ConfVars.HIVELIMITPUSHDOWNMEMORYUSAGE) > 0) {
transformations.add(new LimitPushdownOptimizer());
}
- if(HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES)) {
+ if (HiveConf.getBoolVar(hiveConf,
+ HiveConf.ConfVars.HIVEOPTIMIZEMETADATAQUERIES)) {
transformations.add(new StatsOptimizer());
}
- if (pctx.getContext().getExplain() ||
- HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
+ if (pctx.getContext().getExplain()
+ || HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_EXECUTION_ENGINE)
+ .equals("tez")) {
transformations.add(new AnnotateWithStatistics());
transformations.add(new AnnotateWithOpTraits());
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
index 1ba6c2e..2420971 100755
--- ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
@@ -34,6 +34,7 @@
*/
public class ExprNodeConstantDesc extends ExprNodeDesc implements Serializable {
private static final long serialVersionUID = 1L;
+ protected static final transient char[] hexArray = "0123456789ABCDEF".toCharArray();
private Object value;
public ExprNodeConstantDesc() {
@@ -83,6 +84,15 @@ public String getExprString() {
if (typeInfo.getTypeName().equals(serdeConstants.STRING_TYPE_NAME)) {
return "'" + value.toString() + "'";
+ } else if (typeInfo.getTypeName().equals(serdeConstants.BINARY_TYPE_NAME)) {
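+ // A binary constant has no literal syntax; render it as an uppercase hex string for plan output.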
+ byte[] bytes = (byte[]) value;
+ char[] hexChars = new char[bytes.length * 2];
+ for (int j = 0; j < bytes.length; j++) {
+ int v = bytes[j] & 0xFF;
+ hexChars[j * 2] = hexArray[v >>> 4];
+ hexChars[j * 2 + 1] = hexArray[v & 0x0F];
+ }
+ return new String(hexChars);
} else {
return value.toString();
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
index e0d6aaf..bf3a5d7 100644
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
@@ -21,11 +21,14 @@
import java.util.LinkedHashMap;
import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
import org.apache.hadoop.hive.ql.exec.FilterOperator;
import org.apache.hadoop.hive.ql.exec.LateralViewForwardOperator;
import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;
import org.apache.hadoop.hive.ql.exec.LimitOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.PTFOperator;
import org.apache.hadoop.hive.ql.exec.ScriptOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
@@ -77,6 +80,7 @@
*/
public class PredicatePushDown implements Transform {
+ private static final Log LOG = LogFactory.getLog(PredicatePushDown.class);
private ParseContext pGraphContext;
@Override
@@ -126,6 +130,7 @@ public ParseContext transform(ParseContext pctx) throws SemanticException {
topNodes.addAll(pGraphContext.getTopOps().values());
ogw.startWalking(topNodes, null);
+ LOG.debug("After PPD:\n" + Operator.toString(pctx.getTopOps().values()));
return pGraphContext;
}
diff --git ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
index faa2387..1aab06f 100644
--- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
@@ -255,6 +255,10 @@ public static int estimateRowSizeFromSchema(HiveConf conf, List sche
int avgRowSize = 0;
for (String neededCol : neededColumns) {
ColumnInfo ci = getColumnInfoForColumn(neededCol, schema);
+ if (ci == null) {
+ // No need to collect statistics of index columns
+ continue;
+ }
ObjectInspector oi = ci.getObjectInspector();
String colType = ci.getTypeName();
if (colType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)
diff --git ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
index a2e0cd7..5f484cf 100644
--- ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
+++ ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDFCurrentDB.java
@@ -54,7 +54,7 @@ public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumen
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
- throw new IllegalStateException("never");
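+ // Return a real value in case the UDF is actually evaluated at runtime
+ // (e.g. when constant propagation is disabled).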
+ return SessionState.get().getCurrentDatabase();
}
@Override
diff --git ql/src/test/queries/clientpositive/constprog1.q ql/src/test/queries/clientpositive/constprog1.q
new file mode 100644
index 0000000..b8adccc
--- /dev/null
+++ ql/src/test/queries/clientpositive/constprog1.q
@@ -0,0 +1,9 @@
+set hive.fetch.task.conversion=more;
+set hive.optimize.constant.propagation=true;
+
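+-- The nested INSTR/CONCAT/IF expression over literals should be folded to a constant at compile time.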
+EXPLAIN
+SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows);
+
+SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows);
diff --git ql/src/test/queries/clientpositive/constprog2.q ql/src/test/queries/clientpositive/constprog2.q
new file mode 100644
index 0000000..72ce5a3
--- /dev/null
+++ ql/src/test/queries/clientpositive/constprog2.q
@@ -0,0 +1,10 @@
+set hive.fetch.task.conversion=more;
+set hive.optimize.constant.propagation=true;
+
+EXPLAIN
+SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86;
+
+SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86;
+
diff --git ql/src/test/queries/clientpositive/constprog_dp.q ql/src/test/queries/clientpositive/constprog_dp.q
new file mode 100644
index 0000000..7dad040
--- /dev/null
+++ ql/src/test/queries/clientpositive/constprog_dp.q
@@ -0,0 +1,11 @@
+set hive.optimize.constant.propagation=true;
+set hive.exec.dynamic.partition.mode=nonstrict;
+
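+-- The partition column ds is fixed to '2008-04-08' by the WHERE clause, so the dynamic
+-- partition insert can effectively be treated as a static partition insert.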
+create table dest(key string, value string) partitioned by (ds string);
+
+EXPLAIN
+from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08';
+
+from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08';
diff --git ql/src/test/queries/clientpositive/constprog_type.q ql/src/test/queries/clientpositive/constprog_type.q
new file mode 100644
index 0000000..93249ad
--- /dev/null
+++ ql/src/test/queries/clientpositive/constprog_type.q
@@ -0,0 +1,14 @@
+set hive.optimize.constant.propagation=true;
+
+CREATE TABLE dest1(d date, t timestamp);
+
+EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows);
+
+INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows);
+
+SELECT * FROM dest1;
diff --git ql/src/test/queries/clientpositive/subquery_views.q ql/src/test/queries/clientpositive/subquery_views.q
index 80f8604..32d09d2 100644
--- ql/src/test/queries/clientpositive/subquery_views.q
+++ ql/src/test/queries/clientpositive/subquery_views.q
@@ -26,6 +26,11 @@ where b.key not in
)
;
+explain
+select *
+from cv2 where cv2.key in (select key from cv2 c where c.key < '11');
+;
+
select *
from cv2 where cv2.key in (select key from cv2 c where c.key < '11');
;
diff --git ql/src/test/queries/clientpositive/union27.q ql/src/test/queries/clientpositive/union27.q
index e0fccfc..c039e9c 100644
--- ql/src/test/queries/clientpositive/union27.q
+++ ql/src/test/queries/clientpositive/union27.q
@@ -1,4 +1,5 @@
create table jackson_sev_same as select * from src;
create table dim_pho as select * from src;
create table jackson_sev_add as select * from src;
+explain select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97;
select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97;
diff --git ql/src/test/results/clientpositive/annotate_stats_filter.q.out ql/src/test/results/clientpositive/annotate_stats_filter.q.out
index c7d58f6..02eb923 100644
--- ql/src/test/results/clientpositive/annotate_stats_filter.q.out
+++ ql/src/test/results/clientpositive/annotate_stats_filter.q.out
@@ -136,7 +136,7 @@ STAGE PLANS:
predicate: (state = 'OH') (type: boolean)
Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int)
+ expressions: 'OH' (type: string), locid (type: int), zip (type: bigint), year (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 4 Data size: 398 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -272,7 +272,7 @@ STAGE PLANS:
predicate: (state = 'OH') (type: boolean)
Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int)
+ expressions: 'OH' (type: string), locid (type: int), zip (type: bigint), year (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -652,7 +652,7 @@ STAGE PLANS:
predicate: zip is null (type: boolean)
Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int)
+ expressions: state (type: string), locid (type: int), null (type: void), year (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -1144,102 +1144,21 @@ TOK_QUERY
STAGE DEPENDENCIES:
- Stage-1 is a root stage
- Stage-0 depends on stages: Stage-1
+ Stage-0 is a root stage
STAGE PLANS:
- Stage: Stage-1
- Map Reduce
- Map Operator Tree:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
TableScan
alias: loc_orc
Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
- Filter Operator
- isSamplingPred: false
- predicate: (not false) (type: boolean)
- Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE
- File Output Operator
- compressed: false
- GlobalTableId: 0
-#### A masked pattern was here ####
- NumFilesPerFileSink: 1
- Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE
-#### A masked pattern was here ####
- table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- properties:
- columns _col0,_col1,_col2,_col3
- columns.types string:int:bigint:int
- escape.delim \
- hive.serialization.extend.nesting.levels true
- serialization.format 1
- serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- TotalFiles: 1
- GatherStats: false
- MultiFileSpray: false
- Path -> Alias:
-#### A masked pattern was here ####
- Path -> Partition:
-#### A masked pattern was here ####
- Partition
- base file name: loc_orc
- input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns state,locid,zip,year
- columns.comments
- columns.types string:int:bigint:int
- field.delim |
-#### A masked pattern was here ####
- name default.loc_orc
- numFiles 1
- numRows 8
- rawDataSize 796
- serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year}
- serialization.format |
- serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 493
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
-
- input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
- properties:
- COLUMN_STATS_ACCURATE true
- bucket_count -1
- columns state,locid,zip,year
- columns.comments
- columns.types string:int:bigint:int
- field.delim |
-#### A masked pattern was here ####
- name default.loc_orc
- numFiles 1
- numRows 8
- rawDataSize 796
- serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year}
- serialization.format |
- serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
- totalSize 493
-#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
- name: default.loc_orc
- name: default.loc_orc
- Truncated Path -> Alias:
- /loc_orc [loc_orc]
-
- Stage: Stage-0
- Fetch Operator
- limit: -1
- Processor Tree:
+ Statistics: Num rows: 8 Data size: 796 Basic stats: COMPLETE Column stats: COMPLETE
ListSink
PREHOOK: query: -- numRows: 0 rawDataSize: 0
@@ -1281,7 +1200,7 @@ STAGE PLANS:
GatherStats: false
Filter Operator
isSamplingPred: false
- predicate: (not true) (type: boolean)
+ predicate: false (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
Select Operator
expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int)
@@ -1309,13 +1228,13 @@ STAGE PLANS:
GatherStats: false
MultiFileSpray: false
Path -> Alias:
-#### A masked pattern was here ####
+ -mr-10002default.loc_orc{} [loc_orc]
Path -> Partition:
-#### A masked pattern was here ####
+ -mr-10002default.loc_orc{}
Partition
base file name: loc_orc
- input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
- output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
+ input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
COLUMN_STATS_ACCURATE true
bucket_count -1
@@ -1330,10 +1249,10 @@ STAGE PLANS:
rawDataSize 796
serialization.ddl struct loc_orc { string state, i32 locid, i64 zip, i32 year}
serialization.format |
- serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+ serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
totalSize 493
#### A masked pattern was here ####
- serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
+ serde: org.apache.hadoop.hive.serde2.NullStructSerDe
input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
@@ -1358,7 +1277,7 @@ STAGE PLANS:
name: default.loc_orc
name: default.loc_orc
Truncated Path -> Alias:
- /loc_orc [loc_orc]
+ -mr-10002default.loc_orc{} [loc_orc]
Stage: Stage-0
Fetch Operator
@@ -1550,7 +1469,7 @@ STAGE PLANS:
predicate: ((year = 2001) and year is null) (type: boolean)
Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int)
+ expressions: state (type: string), locid (type: int), zip (type: bigint), null (type: void)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 8 Data size: 804 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -1686,7 +1605,7 @@ STAGE PLANS:
predicate: (((year = 2001) and (state = 'OH')) and (state = 'FL')) (type: boolean)
Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int)
+ expressions: 'FL' (type: string), locid (type: int), zip (type: bigint), 2001 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 1 Data size: 102 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -1962,7 +1881,7 @@ STAGE PLANS:
predicate: (((year = 2001) or year is null) and (state = 'CA')) (type: boolean)
Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: int)
+ expressions: 'CA' (type: string), locid (type: int), zip (type: bigint), year (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 2 Data size: 204 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
diff --git ql/src/test/results/clientpositive/annotate_stats_part.q.out ql/src/test/results/clientpositive/annotate_stats_part.q.out
index a0b4602..043af14 100644
--- ql/src/test/results/clientpositive/annotate_stats_part.q.out
+++ ql/src/test/results/clientpositive/annotate_stats_part.q.out
@@ -965,7 +965,7 @@ STAGE PLANS:
predicate: ((year = '2001') and (year = '__HIVE_DEFAULT_PARTITION__')) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: state (type: string), locid (type: int), zip (type: bigint), year (type: string)
+ expressions: state (type: string), locid (type: int), zip (type: bigint), '__HIVE_DEFAULT_PARTITION__' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
ListSink
diff --git ql/src/test/results/clientpositive/annotate_stats_select.q.out ql/src/test/results/clientpositive/annotate_stats_select.q.out
index 5c2d3ae..8b06d0d 100644
--- ql/src/test/results/clientpositive/annotate_stats_select.q.out
+++ ql/src/test/results/clientpositive/annotate_stats_select.q.out
@@ -1699,7 +1699,7 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: unbase64('0xe23') (type: binary)
+ expressions: D317B6 (type: binary)
outputColumnNames: _col0
Statistics: Num rows: 2 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -1826,7 +1826,7 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: UDFToByte('1') (type: tinyint), UDFToShort('20') (type: smallint)
+ expressions: 1 (type: tinyint), 20 (type: smallint)
outputColumnNames: _col0, _col1
Statistics: Num rows: 2 Data size: 16 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -1949,7 +1949,7 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: CAST( '1970-12-31 15:59:58.174' AS TIMESTAMP) (type: timestamp)
+ expressions: 1970-12-31 15:59:58.174 (type: timestamp)
outputColumnNames: _col0
Statistics: Num rows: 2 Data size: 80 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -2072,7 +2072,7 @@ STAGE PLANS:
Statistics: Num rows: 2 Data size: 1686 Basic stats: COMPLETE Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: CAST( '1970-12-31 15:59:58.174' AS DATE) (type: date)
+ expressions: null (type: void)
outputColumnNames: _col0
Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
diff --git ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out
index c41eafe..4f3e8f7 100644
--- ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out
+++ ql/src/test/results/clientpositive/auto_join14_hadoop20.q.out
@@ -36,11 +36,11 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (key > 100) (type: boolean)
- Statistics: Num rows: 19 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
+ predicate: ((key > 100) and key is not null) (type: boolean)
+ Statistics: Num rows: 10 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
HashTable Sink Operator
condition expressions:
- 0 {key}
+ 0
1 {value}
keys:
0 key (type: string)
@@ -53,8 +53,8 @@ STAGE PLANS:
alias: srcpart
Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (key > 100) (type: boolean)
- Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
+ predicate: ((key > 100) and key is not null) (type: boolean)
+ Statistics: Num rows: 10 Data size: 2004 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
@@ -65,14 +65,14 @@ STAGE PLANS:
0 key (type: string)
1 key (type: string)
outputColumnNames: _col0, _col5
- Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 1102 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: UDFToInteger(_col0) (type: int), _col5 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 1102 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 20 Data size: 2093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 1102 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/cast1.q.out ql/src/test/results/clientpositive/cast1.q.out
index f6b8238..50d46ec 100644
--- ql/src/test/results/clientpositive/cast1.q.out
+++ ql/src/test/results/clientpositive/cast1.q.out
@@ -32,7 +32,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: (3 + 2) (type: int), (3.0 + 2) (type: double), (3 + 2.0) (type: double), (3.0 + 2.0) (type: double), ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0))) (type: int), UDFToBoolean(1) (type: boolean), UDFToInteger(true) (type: int)
+ expressions: 5 (type: int), 5.0 (type: double), 5.0 (type: double), 5.0 (type: double), 5 (type: int), true (type: boolean), 1 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/cluster.q.out ql/src/test/results/clientpositive/cluster.q.out
index a163dd0..aae499b 100644
--- ql/src/test/results/clientpositive/cluster.q.out
+++ ql/src/test/results/clientpositive/cluster.q.out
@@ -14,28 +14,27 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key = 10) (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '10' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '10' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -72,28 +71,27 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key = 20) (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '20' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -130,28 +128,27 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key = 20) (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '20' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -188,28 +185,27 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key = 20) (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '20' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -246,28 +242,27 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key = 20) (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '20' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -304,28 +299,27 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key = 20) (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '20' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -362,28 +356,27 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key = 20) (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col1 (type: string)
sort order: +
Map-reduce partition columns: _col1 (type: string)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string)
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
+ expressions: '20' (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -420,28 +413,27 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key = 20) (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '20' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -479,40 +471,38 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: y
- Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and (key = 20)) (type: boolean)
- Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and (key = 20)) (type: boolean)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
- 1 {KEY.reducesinkkey0}
- outputColumnNames: _col0, _col1, _col4
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ 0 {VALUE._col0}
+ 1
+ outputColumnNames: _col1
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ expressions: _col1 (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -528,16 +518,15 @@ STAGE PLANS:
key expressions: _col1 (type: string)
sort order: +
Map-reduce partition columns: _col1 (type: string)
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col2 (type: string)
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string)
+ expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -575,41 +564,39 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: y
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and (key = 20)) (type: boolean)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and (key = 20)) (type: boolean)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
- 1 {KEY.reducesinkkey0} {VALUE._col0}
- outputColumnNames: _col0, _col1, _col4, _col5
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ 0 {VALUE._col0}
+ 1 {VALUE._col0}
+ outputColumnNames: _col1, _col5
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ expressions: _col1 (type: string), _col5 (type: string)
+ outputColumnNames: _col1, _col3
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -625,16 +612,16 @@ STAGE PLANS:
key expressions: _col1 (type: string)
sort order: +
Map-reduce partition columns: _col1 (type: string)
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col2 (type: string), _col3 (type: string)
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col3 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string)
+ expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string), VALUE._col2 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -672,41 +659,39 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: y
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and (key = 20)) (type: boolean)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and (key = 20)) (type: boolean)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
- 1 {KEY.reducesinkkey0} {VALUE._col0}
- outputColumnNames: _col0, _col1, _col4, _col5
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ 0 {VALUE._col0}
+ 1 {VALUE._col0}
+ outputColumnNames: _col1, _col5
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ expressions: _col1 (type: string), _col5 (type: string)
+ outputColumnNames: _col1, _col3
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -719,19 +704,18 @@ STAGE PLANS:
Map Operator Tree:
TableScan
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col1 (type: string), _col2 (type: string), _col3 (type: string)
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: string), _col3 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string)
+ expressions: '20' (type: string), VALUE._col0 (type: string), '20' (type: string), VALUE._col2 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 1598 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -769,40 +753,38 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: y
- Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and (key = 20)) (type: boolean)
- Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 726 Data size: 1452 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: x
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and (key = 20)) (type: boolean)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
- 1 {KEY.reducesinkkey0}
- outputColumnNames: _col0, _col1, _col4
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ 0 {VALUE._col0}
+ 1
+ outputColumnNames: _col1
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ expressions: _col1 (type: string), '20' (type: string)
+ outputColumnNames: _col1, _col2
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -815,19 +797,18 @@ STAGE PLANS:
Map Operator Tree:
TableScan
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string), _col2 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), VALUE._col1 (type: string)
+ expressions: '20' (type: string), VALUE._col0 (type: string), VALUE._col1 (type: string)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/column_access_stats.q.out ql/src/test/results/clientpositive/column_access_stats.q.out
index b7141bb..d2e9a76 100644
--- ql/src/test/results/clientpositive/column_access_stats.q.out
+++ ql/src/test/results/clientpositive/column_access_stats.q.out
@@ -531,7 +531,6 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: key (type: string)
Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
- value expressions: val (type: string)
TableScan
alias: t1
Statistics: Num rows: 0 Data size: 30 Basic stats: PARTIAL Column stats: NONE
@@ -543,18 +542,17 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: key (type: string)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- value expressions: val (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
- 1 {KEY.reducesinkkey0} {VALUE._col0}
- outputColumnNames: _col0, _col1, _col4, _col5
+ 0 {KEY.reducesinkkey0}
+ 1 {KEY.reducesinkkey0}
+ outputColumnNames: _col0, _col4
Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string)
+ expressions: _col0 (type: string), '3' (type: string), _col4 (type: string), '3' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/combine2_hadoop20.q.out ql/src/test/results/clientpositive/combine2_hadoop20.q.out
index e9947d9..e7d3c26 100644
--- ql/src/test/results/clientpositive/combine2_hadoop20.q.out
+++ ql/src/test/results/clientpositive/combine2_hadoop20.q.out
@@ -208,6 +208,7 @@ STAGE PLANS:
Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
tag: -1
value expressions: _col0 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
diff --git ql/src/test/results/clientpositive/constprog1.q.out ql/src/test/results/clientpositive/constprog1.q.out
new file mode 100644
index 0000000..78c03e2
--- /dev/null
+++ ql/src/test/results/clientpositive/constprog1.q.out
@@ -0,0 +1,37 @@
+PREHOOK: query: EXPLAIN
+SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-0 is a root stage
+
+STAGE PLANS:
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ TableScan
+ alias: src
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: 'F1' (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+ ListSink
+
+PREHOOK: query: SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT IF(INSTR(CONCAT('foo', 'bar'), 'foob') > 0, "F1", "B1")
+ FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+F1
diff --git ql/src/test/results/clientpositive/constprog2.q.out ql/src/test/results/clientpositive/constprog2.q.out
new file mode 100644
index 0000000..a4a4cf4
--- /dev/null
+++ ql/src/test/results/clientpositive/constprog2.q.out
@@ -0,0 +1,75 @@
+PREHOOK: query: EXPLAIN
+SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: src2
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((key = 86) and key is not null) (type: boolean)
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: '86' (type: string)
+ sort order: +
+ Statistics: Num rows: 14 Data size: 1453 Basic stats: COMPLETE Column stats: NONE
+ value expressions: value (type: string)
+ TableScan
+ alias: src1
+ Statistics: Num rows: 2906 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((key = 86) and key is not null) (type: boolean)
+ Statistics: Num rows: 727 Data size: 1454 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: '86' (type: string)
+ sort order: +
+ Statistics: Num rows: 727 Data size: 1454 Basic stats: COMPLETE Column stats: NONE
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0
+ 1 {VALUE._col0}
+ outputColumnNames: _col5
+ Statistics: Num rows: 799 Data size: 1599 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: '86' (type: string), 87.0 (type: double), _col5 (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 799 Data size: 1599 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 799 Data size: 1599 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+PREHOOK: query: SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT src1.key, src1.key + 1, src2.value
+ FROM src src1 join src src2 ON src1.key = src2.key AND src1.key = 86
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+86 87.0 val_86
diff --git ql/src/test/results/clientpositive/constprog_dp.q.out ql/src/test/results/clientpositive/constprog_dp.q.out
new file mode 100644
index 0000000..adff821
--- /dev/null
+++ ql/src/test/results/clientpositive/constprog_dp.q.out
@@ -0,0 +1,80 @@
+PREHOOK: query: create table dest(key string, value string) partitioned by (ds string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: create table dest(key string, value string) partitioned by (ds string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest
+PREHOOK: query: EXPLAIN
+from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+ Stage-2 depends on stages: Stage-0
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: srcpart
+ Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), value (type: string), ds (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col2 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col2 (type: string)
+ Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ Reduce Operator Tree:
+ Extract
+ Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ partition:
+ ds
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+PREHOOK: query: from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Output: default@dest
+POSTHOOK: query: from srcpart
+insert overwrite table dest partition (ds) select key, value, ds where ds='2008-04-08'
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@srcpart
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+POSTHOOK: Output: default@dest@ds=2008-04-08
+POSTHOOK: Lineage: dest PARTITION(ds=2008-04-08).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ]
+POSTHOOK: Lineage: dest PARTITION(ds=2008-04-08).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ]
diff --git ql/src/test/results/clientpositive/constprog_type.q.out ql/src/test/results/clientpositive/constprog_type.q.out
new file mode 100644
index 0000000..2a0047a
--- /dev/null
+++ ql/src/test/results/clientpositive/constprog_type.q.out
@@ -0,0 +1,123 @@
+PREHOOK: query: CREATE TABLE dest1(d date, t timestamp)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+POSTHOOK: query: CREATE TABLE dest1(d date, t timestamp)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@dest1
+PREHOOK: query: EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+POSTHOOK: query: EXPLAIN
+INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-7 depends on stages: Stage-1 , consists of Stage-4, Stage-3, Stage-5
+ Stage-4
+ Stage-0 depends on stages: Stage-4, Stage-3, Stage-6
+ Stage-2 depends on stages: Stage-0
+ Stage-3
+ Stage-5
+ Stage-6 depends on stages: Stage-5
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: src
+ Row Limit Per Split: 1
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+ Select Operator
+ expressions: 2013-11-17 (type: date), 2011-04-29 20:46:56.4485 (type: timestamp)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest1
+
+ Stage: Stage-7
+ Conditional Operator
+
+ Stage: Stage-4
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+ Stage: Stage-0
+ Move Operator
+ tables:
+ replace: true
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest1
+
+ Stage: Stage-2
+ Stats-Aggr Operator
+
+ Stage: Stage-3
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest1
+
+ Stage: Stage-5
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest1
+
+ Stage: Stage-6
+ Move Operator
+ files:
+ hdfs directory: true
+#### A masked pattern was here ####
+
+PREHOOK: query: INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+PREHOOK: Output: default@dest1
+POSTHOOK: query: INSERT OVERWRITE TABLE dest1
+SELECT cast('2013-11-17' as date), cast(cast('1.3041352164485E9' as double) as timestamp)
+ FROM src tablesample (1 rows)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+POSTHOOK: Output: default@dest1
+POSTHOOK: Lineage: dest1.d EXPRESSION []
+POSTHOOK: Lineage: dest1.t EXPRESSION []
+PREHOOK: query: SELECT * FROM dest1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@dest1
+#### A masked pattern was here ####
+POSTHOOK: query: SELECT * FROM dest1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@dest1
+#### A masked pattern was here ####
+2013-11-17 2011-04-29 20:46:56.4485
diff --git ql/src/test/results/clientpositive/create_view.q.out ql/src/test/results/clientpositive/create_view.q.out
index e193a4f..84df43e 100644
--- ql/src/test/results/clientpositive/create_view.q.out
+++ ql/src/test/results/clientpositive/create_view.q.out
@@ -186,7 +186,7 @@ STAGE PLANS:
predicate: (key = 18) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '18' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/cross_product_check_1.q.out ql/src/test/results/clientpositive/cross_product_check_1.q.out
index fec2f7b..9268ba9 100644
--- ql/src/test/results/clientpositive/cross_product_check_1.q.out
+++ ql/src/test/results/clientpositive/cross_product_check_1.q.out
@@ -220,18 +220,15 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -247,17 +244,17 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Group By Operator
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -271,7 +268,7 @@ STAGE PLANS:
TableScan
Reduce Output Operator
sort order:
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string)
TableScan
alias: a
@@ -345,18 +342,15 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -372,17 +366,17 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Group By Operator
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -396,7 +390,7 @@ STAGE PLANS:
TableScan
Reduce Output Operator
sort order:
- Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string)
TableScan
alias: a
@@ -483,18 +477,15 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -510,17 +501,17 @@ STAGE PLANS:
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Group By Operator
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -534,7 +525,7 @@ STAGE PLANS:
TableScan
Reduce Output Operator
sort order:
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string)
TableScan
Reduce Output Operator
diff --git ql/src/test/results/clientpositive/cross_product_check_2.q.out ql/src/test/results/clientpositive/cross_product_check_2.q.out
index 74abda7..994bca1 100644
--- ql/src/test/results/clientpositive/cross_product_check_2.q.out
+++ ql/src/test/results/clientpositive/cross_product_check_2.q.out
@@ -244,23 +244,20 @@ STAGE PLANS:
1 key (type: string)
outputColumnNames: _col0
Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Local Work:
Map Reduce Local Work
Reduce Operator Tree:
@@ -268,11 +265,11 @@ STAGE PLANS:
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -384,23 +381,20 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
Local Work:
Map Reduce Local Work
Reduce Operator Tree:
@@ -408,11 +402,11 @@ STAGE PLANS:
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -630,7 +624,7 @@ STAGE PLANS:
TableScan
Reduce Output Operator
sort order:
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string)
TableScan
Reduce Output Operator
@@ -700,23 +694,20 @@ STAGE PLANS:
1 key (type: string)
outputColumnNames: _col0
Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 5 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Local Work:
Map Reduce Local Work
Reduce Operator Tree:
@@ -724,11 +715,11 @@ STAGE PLANS:
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 11 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 22 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
diff --git ql/src/test/results/clientpositive/cte_1.q.out ql/src/test/results/clientpositive/cte_1.q.out
index afd0d4f..df679b9 100644
--- ql/src/test/results/clientpositive/cte_1.q.out
+++ ql/src/test/results/clientpositive/cte_1.q.out
@@ -23,7 +23,7 @@ STAGE PLANS:
predicate: (key = '5') (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string)
+ expressions: '5' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -80,7 +80,7 @@ STAGE PLANS:
predicate: (key = '5') (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string)
+ expressions: '5' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -137,7 +137,7 @@ STAGE PLANS:
predicate: (key = '5') (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string)
+ expressions: '5' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/explain_logical.q.out ql/src/test/results/clientpositive/explain_logical.q.out
index bb26e8c..e803b51 100644
--- ql/src/test/results/clientpositive/explain_logical.q.out
+++ ql/src/test/results/clientpositive/explain_logical.q.out
@@ -713,7 +713,7 @@ v5:srcpart
predicate: (ds = '10') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator (SEL_2)
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: key (type: string), value (type: string), '10' (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator (FS_4)
diff --git ql/src/test/results/clientpositive/groupby_ppd.q.out ql/src/test/results/clientpositive/groupby_ppd.q.out
index e7ddc41..6ac83cf 100644
--- ql/src/test/results/clientpositive/groupby_ppd.q.out
+++ ql/src/test/results/clientpositive/groupby_ppd.q.out
@@ -26,13 +26,13 @@ STAGE PLANS:
predicate: (bar = 1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: bar (type: int), foo (type: int)
- outputColumnNames: _col0, _col1
+ expressions: foo (type: int)
+ outputColumnNames: _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Union
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col1 (type: int), _col0 (type: int)
+ expressions: _col1 (type: int), 1 (type: int)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
@@ -52,13 +52,13 @@ STAGE PLANS:
predicate: (bar = 1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: bar (type: int), foo (type: int)
- outputColumnNames: _col0, _col1
+ expressions: foo (type: int)
+ outputColumnNames: _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Union
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col1 (type: int), _col0 (type: int)
+ expressions: _col1 (type: int), 1 (type: int)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
diff --git ql/src/test/results/clientpositive/groupby_sort_1.q.out ql/src/test/results/clientpositive/groupby_sort_1.q.out
index bee863b..8420fdd 100644
--- ql/src/test/results/clientpositive/groupby_sort_1.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_1.q.out
@@ -470,6 +470,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -1873,6 +1874,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col3 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -2093,6 +2095,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -2359,6 +2362,7 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -3139,6 +3143,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -3760,52 +3765,62 @@ STAGE PLANS:
alias: t1
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: key
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: key (type: string)
mode: final
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
tag: 0
- value expressions: _col0 (type: string), _col1 (type: bigint)
+ value expressions: _col1 (type: bigint)
+ auto parallelism: false
TableScan
alias: t1
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: key
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: key (type: string)
mode: final
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
tag: 1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -3865,20 +3880,20 @@ STAGE PLANS:
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col1}
+ 0 {KEY.reducesinkkey0} {VALUE._col0}
+ 1 {VALUE._col0}
outputColumnNames: _col0, _col1, _col3
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: UDFToInteger(_col0) (type: int), UDFToInteger((_col1 + _col3)) (type: int)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 1
#### A masked pattern was here ####
NumFilesPerFileSink: 1
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -4070,23 +4085,28 @@ STAGE PLANS:
alias: t1
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), val (type: string)
outputColumnNames: key, val
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: key (type: string), val (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4147,11 +4167,11 @@ STAGE PLANS:
keys: KEY._col0 (type: string), KEY._col1 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
@@ -4177,36 +4197,42 @@ STAGE PLANS:
alias: t1
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: key
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: key (type: string)
mode: final
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
tag: 0
- value expressions: _col0 (type: string), _col1 (type: bigint)
+ value expressions: _col1 (type: bigint)
+ auto parallelism: false
TableScan
GatherStats: false
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
tag: 1
- value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
+ value expressions: _col1 (type: string), _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4287,20 +4313,20 @@ STAGE PLANS:
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 0 {KEY.reducesinkkey0} {VALUE._col0}
+ 1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: string), _col3 (type: string), _col4 (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -4410,6 +4436,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -6421,8 +6448,9 @@ STAGE DEPENDENCIES:
Stage-2 is a root stage
Stage-0 depends on stages: Stage-2
Stage-3 depends on stages: Stage-0
- Stage-1 depends on stages: Stage-2
- Stage-4 depends on stages: Stage-1
+ Stage-4 depends on stages: Stage-2
+ Stage-1 depends on stages: Stage-4
+ Stage-5 depends on stages: Stage-1
STAGE PLANS:
Stage: Stage-2
@@ -6435,16 +6463,15 @@ STAGE PLANS:
predicate: (key = 8) (type: boolean)
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), val (type: string)
- outputColumnNames: _col0, _col1
+ expressions: val (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string)
+ expressions: '8' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
- bucketGroup: true
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -6456,27 +6483,21 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: bigint)
Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
+ expressions: '8' (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: _col0 (type: string), _col1 (type: string)
- mode: final
- outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToInteger(_col2) (type: int)
+ mode: hash
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: true
- Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.dest2
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Reduce Operator Tree:
Group By Operator
aggregations: count(VALUE._col0)
@@ -6510,6 +6531,36 @@ STAGE PLANS:
Stage: Stage-3
Stats-Aggr Operator
+ Stage: Stage-4
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col2 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ keys: KEY._col0 (type: string), KEY._col1 (type: string)
+ mode: mergepartial
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToInteger(_col2) (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: true
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest2
+
Stage: Stage-1
Move Operator
tables:
@@ -6520,7 +6571,7 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dest2
- Stage: Stage-4
+ Stage: Stage-5
Stats-Aggr Operator
PREHOOK: query: FROM (select key, val from T2 where key = 8) x
diff --git ql/src/test/results/clientpositive/groupby_sort_1_23.q.out ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
index 8b7965b..b963527 100644
--- ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
@@ -6464,11 +6464,11 @@ STAGE PLANS:
predicate: (key = 8) (type: boolean)
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), val (type: string)
- outputColumnNames: _col0, _col1
+ expressions: val (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string)
+ expressions: '8' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
@@ -6485,7 +6485,7 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: bigint)
Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
+ expressions: '8' (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
diff --git ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
index fa813dc..5b153b9 100644
--- ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out
@@ -471,6 +471,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -562,6 +563,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -1938,6 +1940,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col3 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -2029,6 +2032,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col3 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -2222,6 +2226,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -2313,6 +2318,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -2552,6 +2558,7 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -2643,6 +2650,7 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -3396,6 +3404,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -3487,6 +3496,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4080,52 +4090,62 @@ STAGE PLANS:
alias: t1
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: key
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: key (type: string)
mode: final
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
tag: 0
- value expressions: _col0 (type: string), _col1 (type: bigint)
+ value expressions: _col1 (type: bigint)
+ auto parallelism: false
TableScan
alias: t1
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: key
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: key (type: string)
mode: final
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
tag: 1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4185,20 +4205,20 @@ STAGE PLANS:
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col1}
+ 0 {KEY.reducesinkkey0} {VALUE._col0}
+ 1 {VALUE._col0}
outputColumnNames: _col0, _col1, _col3
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: UDFToInteger(_col0) (type: int), UDFToInteger((_col1 + _col3)) (type: int)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 1
#### A masked pattern was here ####
NumFilesPerFileSink: 1
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -4391,23 +4411,28 @@ STAGE PLANS:
alias: t1
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), val (type: string)
outputColumnNames: key, val
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: key (type: string), val (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: rand() (type: double)
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4468,7 +4493,7 @@ STAGE PLANS:
keys: KEY._col0 (type: string), KEY._col1 (type: string)
mode: partials
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
@@ -4496,9 +4521,10 @@ STAGE PLANS:
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4531,11 +4557,11 @@ STAGE PLANS:
keys: KEY._col0 (type: string), KEY._col1 (type: string)
mode: final
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
@@ -4561,36 +4587,42 @@ STAGE PLANS:
alias: t1
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
+ Filter Operator
+ isSamplingPred: false
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: key
- Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: key (type: string)
mode: final
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
tag: 0
- value expressions: _col0 (type: string), _col1 (type: bigint)
+ value expressions: _col1 (type: bigint)
+ auto parallelism: false
TableScan
GatherStats: false
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
tag: 1
- value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
+ value expressions: _col1 (type: string), _col2 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4671,20 +4703,20 @@ STAGE PLANS:
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2}
+ 0 {KEY.reducesinkkey0} {VALUE._col0}
+ 1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint), _col2 (type: string), _col3 (type: string), _col4 (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
- Statistics: Num rows: 3 Data size: 13 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -4795,6 +4827,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -4886,6 +4919,7 @@ STAGE PLANS:
Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: bigint)
+ auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
Path -> Partition:
@@ -6897,8 +6931,10 @@ STAGE DEPENDENCIES:
Stage-3 depends on stages: Stage-2
Stage-0 depends on stages: Stage-3
Stage-4 depends on stages: Stage-0
- Stage-1 depends on stages: Stage-3
- Stage-5 depends on stages: Stage-1
+ Stage-5 depends on stages: Stage-2
+ Stage-6 depends on stages: Stage-5
+ Stage-1 depends on stages: Stage-6
+ Stage-7 depends on stages: Stage-1
STAGE PLANS:
Stage: Stage-2
@@ -6911,16 +6947,15 @@ STAGE PLANS:
predicate: (key = 8) (type: boolean)
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), val (type: string)
- outputColumnNames: _col0, _col1
+ expressions: val (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string)
+ expressions: '8' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
- bucketGroup: true
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0, _col1
@@ -6932,27 +6967,21 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: bigint)
Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
+ expressions: '8' (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
keys: _col0 (type: string), _col1 (type: string)
- mode: final
- outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
- Select Operator
- expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToInteger(_col2) (type: int)
+ mode: hash
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: true
- Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
table:
- input format: org.apache.hadoop.mapred.TextInputFormat
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
- serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
- name: default.dest2
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
Reduce Operator Tree:
Group By Operator
aggregations: count(VALUE._col0)
@@ -7010,6 +7039,60 @@ STAGE PLANS:
Stage: Stage-4
Stats-Aggr Operator
+ Stage: Stage-5
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: rand() (type: double)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col2 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ keys: KEY._col0 (type: string), KEY._col1 (type: string)
+ mode: partials
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: true
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-6
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string)
+ sort order: ++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
+ Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col2 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ keys: KEY._col0 (type: string), KEY._col1 (type: string)
+ mode: final
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: UDFToInteger(_col0) (type: int), _col1 (type: string), UDFToInteger(_col2) (type: int)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: true
+ Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ name: default.dest2
+
Stage: Stage-1
Move Operator
tables:
@@ -7020,7 +7103,7 @@ STAGE PLANS:
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.dest2
- Stage: Stage-5
+ Stage: Stage-7
Stats-Aggr Operator
PREHOOK: query: FROM (select key, val from T2 where key = 8) x
diff --git ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
index 48b9fde..fc4cdcc 100644
--- ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
+++ ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
@@ -6945,11 +6945,11 @@ STAGE PLANS:
predicate: (key = 8) (type: boolean)
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), val (type: string)
- outputColumnNames: _col0, _col1
+ expressions: val (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string)
+ expressions: '8' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
@@ -6966,7 +6966,7 @@ STAGE PLANS:
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: bigint)
Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
+ expressions: '8' (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
diff --git ql/src/test/results/clientpositive/infer_const_type.q.out ql/src/test/results/clientpositive/infer_const_type.q.out
index 66de332..0ef7da7 100644
--- ql/src/test/results/clientpositive/infer_const_type.q.out
+++ ql/src/test/results/clientpositive/infer_const_type.q.out
@@ -61,7 +61,7 @@ STAGE PLANS:
predicate: (((((((ti = 127) and (si = 32767)) and (i = 12345)) and (bi = -12345)) and (fl = 906.0)) and (db = -307.0)) and (str = 1234)) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string)
+ expressions: 127 (type: tinyint), 32767 (type: smallint), 12345 (type: int), -12345 (type: bigint), 906.0 (type: float), -307.0 (type: double), '1234' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -135,7 +135,7 @@ STAGE PLANS:
alias: infertypes
Statistics: Num rows: 0 Data size: 117 Basic stats: PARTIAL Column stats: NONE
Filter Operator
- predicate: (((((false or false) or false) or false) or false) or false) (type: boolean)
+ predicate: false (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string)
@@ -199,7 +199,7 @@ STAGE PLANS:
alias: infertypes
Statistics: Num rows: 0 Data size: 117 Basic stats: PARTIAL Column stats: NONE
Filter Operator
- predicate: ((false or false) or false) (type: boolean)
+ predicate: false (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string)
@@ -258,7 +258,7 @@ STAGE PLANS:
predicate: (((ti < 127.0) and (i > 100.0)) and (str = 1.57)) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), str (type: string)
+ expressions: ti (type: tinyint), si (type: smallint), i (type: int), bi (type: bigint), fl (type: float), db (type: double), '1.57' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/input18.q.out ql/src/test/results/clientpositive/input18.q.out
index de980d6..f9875b6 100644
--- ql/src/test/results/clientpositive/input18.q.out
+++ ql/src/test/results/clientpositive/input18.q.out
@@ -36,7 +36,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), (1 + 2) (type: int), (3 + 4) (type: int)
+ expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Transform Operator
diff --git ql/src/test/results/clientpositive/input23.q.out ql/src/test/results/clientpositive/input23.q.out
index e82cff0..090d5b5 100644
--- ql/src/test/results/clientpositive/input23.q.out
+++ ql/src/test/results/clientpositive/input23.q.out
@@ -133,11 +133,11 @@ STAGE PLANS:
Inner Join 0 to 1
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3}
- outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7, _col8, _col9
+ 1 {VALUE._col0} {VALUE._col1}
+ outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7
Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string), _col8 (type: string), _col9 (type: string)
+ expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string), '2008-04-08' (type: string), '14' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Limit
diff --git ql/src/test/results/clientpositive/input26.q.out ql/src/test/results/clientpositive/input26.q.out
index ea37b7e..1adda31 100644
--- ql/src/test/results/clientpositive/input26.q.out
+++ ql/src/test/results/clientpositive/input26.q.out
@@ -85,7 +85,7 @@ STAGE PLANS:
Map Reduce
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), VALUE._col3 (type: string)
+ expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), '2008-04-08' (type: string), '14' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Limit
diff --git ql/src/test/results/clientpositive/input38.q.out ql/src/test/results/clientpositive/input38.q.out
index 0b29370..8249a63 100644
--- ql/src/test/results/clientpositive/input38.q.out
+++ ql/src/test/results/clientpositive/input38.q.out
@@ -39,7 +39,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), (1 + 2) (type: int), (3 + 4) (type: int)
+ expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Transform Operator
diff --git ql/src/test/results/clientpositive/input39_hadoop20.q.out ql/src/test/results/clientpositive/input39_hadoop20.q.out
index c5e1615..d7b92e0 100644
--- ql/src/test/results/clientpositive/input39_hadoop20.q.out
+++ ql/src/test/results/clientpositive/input39_hadoop20.q.out
@@ -73,24 +73,24 @@ STAGE PLANS:
alias: t2
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (((hash(rand(460476415)) & 2147483647) % 32) = 0) (type: boolean)
- Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+ predicate: ((((hash(rand(460476415)) & 2147483647) % 32) = 0) and key is not null) (type: boolean)
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: string)
sort order: +
Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: t1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (((hash(rand(460476415)) & 2147483647) % 32) = 0) (type: boolean)
- Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+ predicate: ((((hash(rand(460476415)) & 2147483647) % 32) = 0) and key is not null) (type: boolean)
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: string)
sort order: +
Map-reduce partition columns: key (type: string)
- Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Join Operator
condition map:
@@ -98,9 +98,9 @@ STAGE PLANS:
condition expressions:
0
1
- Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
Select Operator
- Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 137 Data size: 1460 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(1)
mode: hash
diff --git ql/src/test/results/clientpositive/input6.q.out ql/src/test/results/clientpositive/input6.q.out
index f9cf42f..927a347 100644
--- ql/src/test/results/clientpositive/input6.q.out
+++ ql/src/test/results/clientpositive/input6.q.out
@@ -34,7 +34,7 @@ STAGE PLANS:
predicate: key is null (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: null (type: void), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/input_part2.q.out ql/src/test/results/clientpositive/input_part2.q.out
index 9987769..77c412a 100644
--- ql/src/test/results/clientpositive/input_part2.q.out
+++ ql/src/test/results/clientpositive/input_part2.q.out
@@ -155,7 +155,7 @@ STAGE PLANS:
predicate: ((key < 100) and (ds = '2008-04-08')) (type: boolean)
Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: UDFToInteger(key) (type: int), value (type: string), hr (type: string), ds (type: string)
+ expressions: UDFToInteger(key) (type: int), value (type: string), hr (type: string), '2008-04-08' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -189,7 +189,7 @@ STAGE PLANS:
predicate: ((key < 100) and (ds = '2008-04-09')) (type: boolean)
Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: UDFToInteger(key) (type: int), value (type: string), hr (type: string), ds (type: string)
+ expressions: UDFToInteger(key) (type: int), value (type: string), hr (type: string), '2008-04-09' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/input_part4.q.out ql/src/test/results/clientpositive/input_part4.q.out
index dbc5cf5..85e9aa9 100644
--- ql/src/test/results/clientpositive/input_part4.q.out
+++ ql/src/test/results/clientpositive/input_part4.q.out
@@ -19,7 +19,7 @@ STAGE PLANS:
predicate: ((ds = '2008-04-08') and (hr = 15)) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: key (type: string), value (type: string), '2008-04-08' (type: string), '15' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
ListSink
diff --git ql/src/test/results/clientpositive/input_part6.q.out ql/src/test/results/clientpositive/input_part6.q.out
index 664a7ee..f05c05b 100644
--- ql/src/test/results/clientpositive/input_part6.q.out
+++ ql/src/test/results/clientpositive/input_part6.q.out
@@ -16,12 +16,12 @@ STAGE PLANS:
alias: x
Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (ds = ((2008 - 4) - 8)) (type: boolean)
- Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ predicate: (ds = 1996) (type: boolean)
+ Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: key (type: string), value (type: string), '1996' (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 10
Statistics: Num rows: 10 Data size: 2000 Basic stats: COMPLETE Column stats: NONE
diff --git ql/src/test/results/clientpositive/insert1.q.out ql/src/test/results/clientpositive/insert1.q.out
index fbbf89c..df3d1fb 100644
--- ql/src/test/results/clientpositive/insert1.q.out
+++ ql/src/test/results/clientpositive/insert1.q.out
@@ -48,10 +48,10 @@ STAGE PLANS:
alias: a
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key = (- 1)) (type: boolean)
+ predicate: (key = -1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: -1 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -137,10 +137,10 @@ STAGE PLANS:
alias: a
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key = (- 1)) (type: boolean)
+ predicate: (key = -1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: -1 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -239,10 +239,10 @@ STAGE PLANS:
alias: a
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key = (- 1)) (type: boolean)
+ predicate: (key = -1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: -1 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -328,10 +328,10 @@ STAGE PLANS:
alias: a
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key = (- 1)) (type: boolean)
+ predicate: (key = -1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: -1 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/join14_hadoop20.q.out ql/src/test/results/clientpositive/join14_hadoop20.q.out
index 8e3dd5a..9f75976 100644
--- ql/src/test/results/clientpositive/join14_hadoop20.q.out
+++ ql/src/test/results/clientpositive/join14_hadoop20.q.out
@@ -30,7 +30,7 @@ STAGE PLANS:
alias: srcpart
Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (key > 100) (type: boolean)
+ predicate: ((key > 100) and key is not null) (type: boolean)
Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: string)
@@ -42,7 +42,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (key > 100) (type: boolean)
+ predicate: ((key > 100) and key is not null) (type: boolean)
Statistics: Num rows: 19 Data size: 1903 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: string)
diff --git ql/src/test/results/clientpositive/join38.q.out ql/src/test/results/clientpositive/join38.q.out
index 2c4882f..ba0590c 100644
--- ql/src/test/results/clientpositive/join38.q.out
+++ ql/src/test/results/clientpositive/join38.q.out
@@ -72,8 +72,8 @@ STAGE PLANS:
0 {value}
1 {col5}
keys:
- 0 key (type: string)
- 1 col11 (type: string)
+ 0 '111' (type: string)
+ 1 '111' (type: string)
Stage: Stage-1
Map Reduce
@@ -91,8 +91,8 @@ STAGE PLANS:
0 {value}
1 {col5}
keys:
- 0 key (type: string)
- 1 col11 (type: string)
+ 0 '111' (type: string)
+ 1 '111' (type: string)
outputColumnNames: _col1, _col9
Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
Select Operator
diff --git ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out
index b55c102..8edf5ab 100644
--- ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out
+++ ql/src/test/results/clientpositive/join_cond_pushdown_1.q.out
@@ -323,7 +323,7 @@ STAGE PLANS:
Reduce Output Operator
sort order:
Statistics: Num rows: 1 Data size: 634 Basic stats: COMPLETE Column stats: NONE
- value expressions: p_partkey (type: int), p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string)
+ value expressions: p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string)
TableScan
alias: p1
Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
@@ -337,8 +337,8 @@ STAGE PLANS:
Inner Join 0 to 1
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
+ 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -368,18 +368,18 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: _col12 (type: string)
Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string)
+ value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18}
+ 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18}
1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string)
+ expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
index e31f3fb..867d878 100644
--- ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
+++ ql/src/test/results/clientpositive/join_cond_pushdown_3.q.out
@@ -340,7 +340,7 @@ STAGE PLANS:
Reduce Output Operator
sort order:
Statistics: Num rows: 1 Data size: 634 Basic stats: COMPLETE Column stats: NONE
- value expressions: p_partkey (type: int), p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string)
+ value expressions: p_name (type: string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), p_size (type: int), p_container (type: string), p_retailprice (type: double), p_comment (type: string)
TableScan
alias: p1
Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
@@ -354,8 +354,8 @@ STAGE PLANS:
Inner Join 0 to 1
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
+ 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -385,26 +385,26 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: _col12 (type: string)
Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string)
+ value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18}
+ 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18}
1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((_col11 = 1) and (_col23 = _col12)) (type: boolean)
- Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE
+ predicate: (_col23 = _col12) (type: boolean)
+ Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string)
+ expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
- Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out
index ea39263..e2220e1 100644
--- ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out
+++ ql/src/test/results/clientpositive/join_cond_pushdown_unqual1.q.out
@@ -377,7 +377,7 @@ STAGE PLANS:
Reduce Output Operator
sort order:
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- value expressions: p2_partkey (type: int), p2_name (type: string), p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string)
+ value expressions: p2_name (type: string), p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string)
TableScan
alias: p1
Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
@@ -391,8 +391,8 @@ STAGE PLANS:
Inner Join 0 to 1
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
+ 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -422,18 +422,18 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: _col12 (type: string)
Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string)
+ value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18}
+ 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18}
1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string)
+ expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out
index 8155a31..a98eb49 100644
--- ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out
+++ ql/src/test/results/clientpositive/join_cond_pushdown_unqual3.q.out
@@ -394,7 +394,7 @@ STAGE PLANS:
Reduce Output Operator
sort order:
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- value expressions: p2_partkey (type: int), p2_name (type: string), p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string)
+ value expressions: p2_name (type: string), p2_mfgr (type: string), p2_brand (type: string), p2_type (type: string), p2_size (type: int), p2_container (type: string), p2_retailprice (type: double), p2_comment (type: string)
TableScan
alias: p1
Statistics: Num rows: 5 Data size: 3173 Basic stats: COMPLETE Column stats: NONE
@@ -408,8 +408,8 @@ STAGE PLANS:
Inner Join 0 to 1
condition expressions:
0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
+ 1 {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8}
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -439,26 +439,26 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: _col12 (type: string)
Statistics: Num rows: 5 Data size: 3490 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string)
+ value expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {VALUE._col11} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18}
+ 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7} {VALUE._col8} {KEY.reducesinkkey0} {VALUE._col12} {VALUE._col13} {VALUE._col14} {VALUE._col15} {VALUE._col16} {VALUE._col17} {VALUE._col18}
1 {VALUE._col0} {KEY.reducesinkkey0} {VALUE._col1} {VALUE._col2} {VALUE._col3} {VALUE._col4} {VALUE._col5} {VALUE._col6} {VALUE._col7}
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
+ outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30
Statistics: Num rows: 5 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((_col11 = 1) and (_col23 = _col12)) (type: boolean)
- Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE
+ predicate: (_col23 = _col12) (type: boolean)
+ Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), _col11 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string)
+ expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: double), _col8 (type: string), 1 (type: int), _col12 (type: string), _col13 (type: string), _col14 (type: string), _col15 (type: string), _col16 (type: int), _col17 (type: string), _col18 (type: double), _col19 (type: string), _col22 (type: int), _col23 (type: string), _col24 (type: string), _col25 (type: string), _col26 (type: string), _col27 (type: int), _col28 (type: string), _col29 (type: double), _col30 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26
- Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 767 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 2 Data size: 1535 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/join_view.q.out ql/src/test/results/clientpositive/join_view.q.out
index a42d0fb..7c0a3df 100644
--- ql/src/test/results/clientpositive/join_view.q.out
+++ ql/src/test/results/clientpositive/join_view.q.out
@@ -50,8 +50,8 @@ STAGE PLANS:
Inner Join 0 to 1
condition expressions:
0 {VALUE._col1}
- 1 {VALUE._col0} {KEY.reducesinkkey0}
- outputColumnNames: _col1, _col5, _col7
+ 1 {VALUE._col0}
+ outputColumnNames: _col1, _col5
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col1 (type: string), _col5 (type: int), _col7 (type: string)
diff --git ql/src/test/results/clientpositive/list_bucket_dml_1.q.out ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
index b48c36a..b7ac4fc 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_1.q.out
@@ -448,7 +448,7 @@ STAGE PLANS:
predicate: (key = '484') (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '484' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_11.q.out ql/src/test/results/clientpositive/list_bucket_dml_11.q.out
index acf80aa..fe0f469 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_11.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_11.q.out
@@ -318,7 +318,7 @@ STAGE PLANS:
predicate: (value = 'val_466') (type: boolean)
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: key (type: string), 'val_466' (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_12.q.out ql/src/test/results/clientpositive/list_bucket_dml_12.q.out
index fa5bc92..6a2c063 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_12.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_12.q.out
@@ -331,7 +331,7 @@ STAGE PLANS:
predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean)
Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: col1 (type: string), col2 (type: string), col3 (type: string), col4 (type: string), col5 (type: string), ds (type: string), hr (type: string)
+ expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -489,7 +489,7 @@ STAGE PLANS:
predicate: ((col2 = '382') and (col4 = 'val_382')) (type: boolean)
Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: col1 (type: string), col2 (type: string), col3 (type: string), col4 (type: string), col5 (type: string), ds (type: string), hr (type: string)
+ expressions: col1 (type: string), '382' (type: string), col3 (type: string), 'val_382' (type: string), col5 (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_13.q.out ql/src/test/results/clientpositive/list_bucket_dml_13.q.out
index a5813a6..4b7863c 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_13.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_13.q.out
@@ -331,7 +331,7 @@ STAGE PLANS:
predicate: ((col2 = '466') and (col4 = 'val_466')) (type: boolean)
Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: col1 (type: string), col2 (type: string), col3 (type: string), col4 (type: string), col5 (type: string), ds (type: string), hr (type: string)
+ expressions: col1 (type: string), '466' (type: string), col3 (type: string), 'val_466' (type: string), col5 (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6
Statistics: Num rows: 125 Data size: 1578 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_14.q.out ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
index 730fa9e..29b9681 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_14.q.out
@@ -291,7 +291,7 @@ STAGE PLANS:
predicate: (key = '484') (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '484' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_2.q.out ql/src/test/results/clientpositive/list_bucket_dml_2.q.out
index 5462629..e0757fb 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_2.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_2.q.out
@@ -417,7 +417,7 @@ STAGE PLANS:
predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_3.q.out ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
index b786fc8..3e641d2 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_3.q.out
@@ -388,7 +388,7 @@ STAGE PLANS:
predicate: (key = '484') (type: boolean)
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '484' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_4.q.out ql/src/test/results/clientpositive/list_bucket_dml_4.q.out
index 083568a..90e379f 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_4.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_4.q.out
@@ -820,7 +820,7 @@ STAGE PLANS:
predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_5.q.out ql/src/test/results/clientpositive/list_bucket_dml_5.q.out
index 2db238e..9223a8c 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_5.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_5.q.out
@@ -461,7 +461,7 @@ STAGE PLANS:
predicate: ((key = '103') and (value = 'val_103')) (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '103' (type: string), 'val_103' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_6.q.out ql/src/test/results/clientpositive/list_bucket_dml_6.q.out
index c1ababc..549aa25 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_6.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_6.q.out
@@ -982,7 +982,7 @@ STAGE PLANS:
predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_7.q.out ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
index 4c905e9..6c18e2e 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
@@ -930,7 +930,7 @@ STAGE PLANS:
predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_8.q.out ql/src/test/results/clientpositive/list_bucket_dml_8.q.out
index ff94687..a8c007a 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_8.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_8.q.out
@@ -581,7 +581,7 @@ STAGE PLANS:
predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_dml_9.q.out ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
index 41872d7..58c62fd 100644
--- ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
+++ ql/src/test/results/clientpositive/list_bucket_dml_9.q.out
@@ -820,7 +820,7 @@ STAGE PLANS:
predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
index 81c519e..4adfc7c 100644
--- ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
+++ ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
@@ -171,7 +171,7 @@ STAGE PLANS:
predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string)
+ expressions: '484' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -331,7 +331,7 @@ STAGE PLANS:
predicate: ((key = '238') and (value = 'val_238')) (type: boolean)
Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '238' (type: string), 'val_238' (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -638,7 +638,7 @@ STAGE PLANS:
predicate: (key = '495') (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '495' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out
index 9caaece..00a7365 100644
--- ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out
+++ ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out
@@ -169,7 +169,7 @@ STAGE PLANS:
predicate: (value = 'val_484') (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: key (type: string), 'val_484' (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -368,7 +368,7 @@ STAGE PLANS:
predicate: (key = '406') (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string)
+ expressions: '406' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out
index 2dac90a..f539265 100644
--- ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out
+++ ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out
@@ -287,7 +287,7 @@ STAGE PLANS:
predicate: (key = '145') (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '145' (type: string), value (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -600,7 +600,7 @@ STAGE PLANS:
predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -754,7 +754,7 @@ STAGE PLANS:
predicate: ((key = '327') and (value = 'val_327')) (type: boolean)
Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
+ expressions: '327' (type: string), 'val_327' (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out
index 69b14fe..1674768 100644
--- ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out
+++ ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out
@@ -223,7 +223,7 @@ STAGE PLANS:
predicate: (x = 484) (type: boolean)
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: x (type: int)
+ expressions: 484 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -370,7 +370,7 @@ STAGE PLANS:
predicate: (x = 495) (type: boolean)
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: x (type: int)
+ expressions: 495 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -513,7 +513,7 @@ STAGE PLANS:
predicate: (x = 1) (type: boolean)
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: x (type: int)
+ expressions: 1 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out
index f952c28..7e2298c 100644
--- ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out
+++ ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out
@@ -240,7 +240,7 @@ STAGE PLANS:
predicate: (x = 484) (type: boolean)
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: x (type: int)
+ expressions: 484 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -757,7 +757,7 @@ STAGE PLANS:
predicate: (x = 484) (type: boolean)
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: x (type: int)
+ expressions: 484 (type: int)
outputColumnNames: x
Statistics: Num rows: 3 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Group By Operator
diff --git ql/src/test/results/clientpositive/literal_double.q.out ql/src/test/results/clientpositive/literal_double.q.out
index b3f51f4..251858a 100644
--- ql/src/test/results/clientpositive/literal_double.q.out
+++ ql/src/test/results/clientpositive/literal_double.q.out
@@ -14,7 +14,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: 3.14 (type: double), (- 3.14) (type: double), 3.14E8 (type: double), 3.14E-8 (type: double), (- 3.14E8) (type: double), (- 3.14E-8) (type: double), 3.14E8 (type: double), 3.14E8 (type: double), 3.14E-8 (type: double)
+ expressions: 3.14 (type: double), -3.14 (type: double), 3.14E8 (type: double), 3.14E-8 (type: double), -3.14E8 (type: double), -3.14E-8 (type: double), 3.14E8 (type: double), 3.14E8 (type: double), 3.14E-8 (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
diff --git ql/src/test/results/clientpositive/macro.q.out ql/src/test/results/clientpositive/macro.q.out
index dc046ba..78cd90a 100644
--- ql/src/test/results/clientpositive/macro.q.out
+++ ql/src/test/results/clientpositive/macro.q.out
@@ -29,7 +29,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: SIGMOID(2) (type: double)
+ expressions: 0.8807970779778823 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
@@ -74,7 +74,7 @@ STAGE PLANS:
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: SIGMOID(2) (type: double)
+ expressions: 0.8807970779778823 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
@@ -119,7 +119,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: (FIXED_NUMBER() + 1) (type: int)
+ expressions: 2 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
@@ -165,7 +165,7 @@ STAGE PLANS:
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: (FIXED_NUMBER() + 1) (type: int)
+ expressions: 2 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
@@ -237,7 +237,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: SIMPLE_ADD(1, 9) (type: int)
+ expressions: 10 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
@@ -283,7 +283,7 @@ STAGE PLANS:
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
GatherStats: false
Select Operator
- expressions: SIMPLE_ADD(1, 9) (type: int)
+ expressions: 10 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
diff --git ql/src/test/results/clientpositive/mapjoin1.q.out ql/src/test/results/clientpositive/mapjoin1.q.out
index c77141b..77b9b3a 100644
--- ql/src/test/results/clientpositive/mapjoin1.q.out
+++ ql/src/test/results/clientpositive/mapjoin1.q.out
@@ -343,9 +343,6 @@ STAGE PLANS:
TableScan
alias: a
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: true (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
HashTable Sink Operator
condition expressions:
0 {value}
@@ -360,9 +357,6 @@ STAGE PLANS:
TableScan
alias: b
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: true (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Map Join Operator
condition map:
Right Outer Join0 to 1
@@ -373,17 +367,17 @@ STAGE PLANS:
0 key (type: string)
1 key (type: string)
outputColumnNames: _col0, _col1, _col4, _col5
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string), _col5 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 10
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 10 Data size: 2060 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 10 Data size: 2060 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/multi_insert.q.out ql/src/test/results/clientpositive/multi_insert.q.out
index 575bd06..1fdad68 100644
--- ql/src/test/results/clientpositive/multi_insert.q.out
+++ ql/src/test/results/clientpositive/multi_insert.q.out
@@ -2506,7 +2506,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2520,7 +2520,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '2' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2534,7 +2534,7 @@ STAGE PLANS:
predicate: (key = 4) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '4' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2600,7 +2600,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2614,7 +2614,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '2' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2628,7 +2628,7 @@ STAGE PLANS:
predicate: (key = 4) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '4' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2694,7 +2694,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2708,7 +2708,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '2' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2722,7 +2722,7 @@ STAGE PLANS:
predicate: (key = 4) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '4' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2788,7 +2788,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2802,7 +2802,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '2' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2816,7 +2816,7 @@ STAGE PLANS:
predicate: (key = 4) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '4' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out
index 06c47ce..439445c 100644
--- ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out
+++ ql/src/test/results/clientpositive/multi_insert_move_tasks_share_dependencies.q.out
@@ -2555,7 +2555,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2569,7 +2569,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '2' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2583,7 +2583,7 @@ STAGE PLANS:
predicate: (key = 4) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '4' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2653,7 +2653,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2667,7 +2667,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '2' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2681,7 +2681,7 @@ STAGE PLANS:
predicate: (key = 4) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '4' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2751,7 +2751,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2765,7 +2765,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '2' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2779,7 +2779,7 @@ STAGE PLANS:
predicate: (key = 4) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '4' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2849,7 +2849,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2863,7 +2863,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '2' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -2877,7 +2877,7 @@ STAGE PLANS:
predicate: (key = 4) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '4' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/num_op_type_conv.q.out ql/src/test/results/clientpositive/num_op_type_conv.q.out
index 4926c8b..5e898d2 100644
--- ql/src/test/results/clientpositive/num_op_type_conv.q.out
+++ ql/src/test/results/clientpositive/num_op_type_conv.q.out
@@ -20,7 +20,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: (null + 7) (type: double), (1.0 - null) (type: double), (null + null) (type: double), (UDFToLong(21) % UDFToByte(5)) (type: bigint), (UDFToLong(21) % UDFToLong(21)) (type: bigint), (9 % '3') (type: double)
+ expressions: (null + 7) (type: double), (1.0 - null) (type: double), (null + null) (type: double), 1 (type: bigint), 0 (type: bigint), 0.0 (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
diff --git ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
index 6d50419..beeebfe 100644
--- ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
+++ ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out
@@ -319,7 +319,7 @@ STAGE PLANS:
alias: orc_pred
Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((t is not null and (t < 0)) and (t > (- 2))) (type: boolean)
+ predicate: ((t is not null and (t < 0)) and (t > -2)) (type: boolean)
Statistics: Num rows: 58 Data size: 17204 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: t (type: tinyint)
@@ -474,10 +474,10 @@ STAGE PLANS:
alias: orc_pred
Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (((t = (- 1)) and s is not null) and (s like 'bob%')) (type: boolean)
+ predicate: (((t = -1) and s is not null) and (s like 'bob%')) (type: boolean)
Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: t (type: tinyint), s (type: string)
+ expressions: -1 (type: tinyint), s (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -605,7 +605,7 @@ STAGE PLANS:
alias: orc_pred
Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (((s is not null and (s like 'bob%')) and (not (t) IN ((- 1), (- 2), (- 3)))) and t BETWEEN 25 AND 30) (type: boolean)
+ predicate: (((s is not null and (s like 'bob%')) and (not (t) IN (-1, -2, -3))) and t BETWEEN 25 AND 30) (type: boolean)
Statistics: Num rows: 65 Data size: 19281 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: t (type: tinyint), s (type: string)
@@ -782,7 +782,7 @@ STAGE PLANS:
alias: orc_pred
Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (((((((d >= round(9.99)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
+ predicate: (((((((d >= 10.0) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
Statistics: Num rows: 2 Data size: 593 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
@@ -984,7 +984,7 @@ STAGE PLANS:
alias: orc_pred
Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (((((((((t > 10) and (t <> 101)) and (d >= round(9.99))) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
+ predicate: (((((((((t > 10) and (t <> 101)) and (d >= 10.0)) and (d < 12.0)) and t is not null) and (s like '%son')) and (not (s like '%car%'))) and (t > 0)) and si BETWEEN 300 AND 400) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string)
diff --git ql/src/test/results/clientpositive/pcr.q.out ql/src/test/results/clientpositive/pcr.q.out
index db695aa..c734f74 100644
--- ql/src/test/results/clientpositive/pcr.q.out
+++ ql/src/test/results/clientpositive/pcr.q.out
@@ -1694,11 +1694,11 @@ STAGE PLANS:
predicate: (key = 14) (type: boolean)
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
- outputColumnNames: _col0, _col1
+ expressions: value (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: int), _col1 (type: string)
+ key expressions: 14 (type: int), _col1 (type: string)
sort order: ++
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
tag: -1
@@ -1802,7 +1802,7 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
+ expressions: 14 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 20 Data size: 160 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -4637,7 +4637,7 @@ STAGE PLANS:
predicate: (key = 2) (type: boolean)
Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: 2 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -4676,7 +4676,7 @@ STAGE PLANS:
predicate: (key = 3) (type: boolean)
Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: 3 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 10 Data size: 80 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -5438,20 +5438,20 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: srcpart
- Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 112 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Filter Operator
isSamplingPred: false
predicate: (key = 11) (type: boolean)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string), ds (type: string), hr (type: string)
+ outputColumnNames: _col1, _col2, _col3
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string), _col2 (type: string), _col3 (type: string)
+ key expressions: '11' (type: string), _col2 (type: string), _col3 (type: string)
sort order: +++
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: string)
auto parallelism: false
@@ -5556,15 +5556,15 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
+ expressions: '11' (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
@@ -5661,20 +5661,20 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: srcpart
- Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 112 Data size: 11624 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Filter Operator
isSamplingPred: false
predicate: (key = 11) (type: boolean)
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ expressions: value (type: string), ds (type: string), hr (type: string)
+ outputColumnNames: _col1, _col2, _col3
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string), _col2 (type: string), _col3 (type: string)
+ key expressions: '11' (type: string), _col2 (type: string), _col3 (type: string)
sort order: +++
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
tag: -1
value expressions: _col1 (type: string)
auto parallelism: false
@@ -5779,15 +5779,15 @@ STAGE PLANS:
Needs Tagging: false
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
+ expressions: '11' (type: string), VALUE._col0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
- Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 56 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.TextInputFormat
diff --git ql/src/test/results/clientpositive/ppd2.q.out ql/src/test/results/clientpositive/ppd2.q.out
index dd2aa3f..8b23779 100644
--- ql/src/test/results/clientpositive/ppd2.q.out
+++ ql/src/test/results/clientpositive/ppd2.q.out
@@ -353,9 +353,8 @@ STAGE PLANS:
predicate: (key is not null and (key = 20)) (type: boolean)
Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: x
@@ -364,9 +363,8 @@ STAGE PLANS:
predicate: (key is not null and (key = 20)) (type: boolean)
Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
Reduce Operator Tree:
@@ -374,13 +372,13 @@ STAGE PLANS:
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
- 1 {KEY.reducesinkkey0}
- outputColumnNames: _col0, _col1, _col4
+ 0 {VALUE._col0}
+ 1
+ outputColumnNames: _col1
Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col1 (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -398,10 +396,9 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: _col1 (type: string)
Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col2 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string)
+ expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/ppd_clusterby.q.out ql/src/test/results/clientpositive/ppd_clusterby.q.out
index 9150b7c..d21db56 100644
--- ql/src/test/results/clientpositive/ppd_clusterby.q.out
+++ ql/src/test/results/clientpositive/ppd_clusterby.q.out
@@ -19,18 +19,17 @@ STAGE PLANS:
predicate: (key = 10) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
+ expressions: value (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '10' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '10' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -78,9 +77,8 @@ STAGE PLANS:
predicate: (key is not null and (key = 20)) (type: boolean)
Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: x
@@ -89,9 +87,8 @@ STAGE PLANS:
predicate: (key is not null and (key = 20)) (type: boolean)
Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
Reduce Operator Tree:
@@ -99,17 +96,14 @@ STAGE PLANS:
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
- 1 {KEY.reducesinkkey0}
- outputColumnNames: _col0, _col1, _col4
- Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (_col0 = 20) (type: boolean)
- Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE
+ 0 {VALUE._col0}
+ 1
+ outputColumnNames: _col1
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE
+ expressions: _col1 (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 798 Data size: 1597 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -126,10 +120,9 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: _col1 (type: string)
Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col2 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string)
+ expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 7 Data size: 719 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -176,18 +169,17 @@ STAGE PLANS:
predicate: (key = 10) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
- outputColumnNames: _col0, _col1
+ expressions: value (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: '10' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+ expressions: '10' (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -235,9 +227,8 @@ STAGE PLANS:
predicate: (key is not null and (key = 20)) (type: boolean)
Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
Statistics: Num rows: 14 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: x
@@ -246,9 +237,8 @@ STAGE PLANS:
predicate: (key is not null and (key = 20)) (type: boolean)
Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string)
+ key expressions: '20' (type: string)
sort order: +
- Map-reduce partition columns: key (type: string)
Statistics: Num rows: 7 Data size: 1402 Basic stats: COMPLETE Column stats: NONE
value expressions: value (type: string)
Reduce Operator Tree:
@@ -256,13 +246,13 @@ STAGE PLANS:
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
- 1 {KEY.reducesinkkey0}
- outputColumnNames: _col0, _col1, _col4
+ 0 {VALUE._col0}
+ 1
+ outputColumnNames: _col1
Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col4 (type: string)
- outputColumnNames: _col0, _col1, _col2
+ expressions: _col1 (type: string)
+ outputColumnNames: _col1
Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -280,10 +270,9 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: _col1 (type: string)
Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
- value expressions: _col0 (type: string), _col2 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string), VALUE._col1 (type: string)
+ expressions: '20' (type: string), KEY.reducesinkkey0 (type: string), '20' (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 15 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/ppd_constant_where.q.out ql/src/test/results/clientpositive/ppd_constant_where.q.out
index bb0dba9..d031586 100644
--- ql/src/test/results/clientpositive/ppd_constant_where.q.out
+++ ql/src/test/results/clientpositive/ppd_constant_where.q.out
@@ -17,11 +17,8 @@ STAGE PLANS:
TableScan
alias: srcpart
Statistics: Num rows: 0 Data size: 11624 Basic stats: PARTIAL Column stats: COMPLETE
- Filter Operator
- predicate: ('a' = 'a') (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
Select Operator
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
+ Statistics: Num rows: 0 Data size: 11624 Basic stats: PARTIAL Column stats: COMPLETE
Group By Operator
aggregations: count()
mode: hash
diff --git ql/src/test/results/clientpositive/ppd_join4.q.out ql/src/test/results/clientpositive/ppd_join4.q.out
index 7ee07ae..2c52a12 100644
--- ql/src/test/results/clientpositive/ppd_join4.q.out
+++ ql/src/test/results/clientpositive/ppd_join4.q.out
@@ -58,18 +58,13 @@ STAGE PLANS:
predicate: ((id is not null and (name = 'c')) and (id = 'a')) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: id (type: string), name (type: string)
- outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: 'a' (type: string)
sort order: +
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
- outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
compressed: false
@@ -88,28 +83,24 @@ STAGE PLANS:
predicate: (id is not null and (id = 'a')) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Reduce Output Operator
- key expressions: id (type: string)
+ key expressions: 'a' (type: string)
sort order: +
- Map-reduce partition columns: id (type: string)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
TableScan
Reduce Output Operator
- key expressions: _col0 (type: string)
+ key expressions: 'a' (type: string)
sort order: +
- Map-reduce partition columns: _col0 (type: string)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- value expressions: _col1 (type: string)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
+ 0
1
- outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string)
+ expressions: 'a' (type: string), 'c' (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/ppd_outer_join5.q.out ql/src/test/results/clientpositive/ppd_outer_join5.q.out
index a8d1681..9cb47e1 100644
--- ql/src/test/results/clientpositive/ppd_outer_join5.q.out
+++ ql/src/test/results/clientpositive/ppd_outer_join5.q.out
@@ -45,9 +45,8 @@ STAGE PLANS:
predicate: (id = 20) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Reduce Output Operator
- key expressions: id (type: int)
+ key expressions: 20 (type: int)
sort order: +
- Map-reduce partition columns: id (type: int)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
value expressions: key (type: string), value (type: string)
TableScan
@@ -82,11 +81,11 @@ STAGE PLANS:
condition expressions:
0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
- 2 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
- outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12
+ 2 {VALUE._col0} {VALUE._col1}
+ outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col11, _col12
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string)
+ expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: string), 20 (type: int), _col11 (type: string), _col12 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -134,9 +133,8 @@ STAGE PLANS:
predicate: (id = 20) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Reduce Output Operator
- key expressions: id (type: int)
+ key expressions: 20 (type: int)
sort order: +
- Map-reduce partition columns: id (type: int)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
value expressions: key (type: string), value (type: string)
TableScan
@@ -146,9 +144,8 @@ STAGE PLANS:
predicate: (id = 20) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Reduce Output Operator
- key expressions: id (type: int)
+ key expressions: 20 (type: int)
sort order: +
- Map-reduce partition columns: id (type: int)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
value expressions: key (type: string), value (type: string)
Reduce Operator Tree:
@@ -157,13 +154,13 @@ STAGE PLANS:
Inner Join 0 to 1
Left Outer Join1 to 2
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
- 1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
2 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
- outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12
+ outputColumnNames: _col1, _col2, _col6, _col7, _col10, _col11, _col12
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string)
+ expressions: 20 (type: int), _col1 (type: string), _col2 (type: string), 20 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -211,9 +208,8 @@ STAGE PLANS:
predicate: (id = 20) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Reduce Output Operator
- key expressions: id (type: int)
+ key expressions: 20 (type: int)
sort order: +
- Map-reduce partition columns: id (type: int)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
value expressions: key (type: string), value (type: string)
TableScan
@@ -223,9 +219,8 @@ STAGE PLANS:
predicate: (id = 20) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Reduce Output Operator
- key expressions: id (type: int)
+ key expressions: 20 (type: int)
sort order: +
- Map-reduce partition columns: id (type: int)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
value expressions: key (type: string), value (type: string)
Reduce Operator Tree:
@@ -234,13 +229,13 @@ STAGE PLANS:
Inner Join 0 to 1
Left Outer Join0 to 2
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
- 1 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
+ 0 {VALUE._col0} {VALUE._col1}
+ 1 {VALUE._col0} {VALUE._col1}
2 {KEY.reducesinkkey0} {VALUE._col0} {VALUE._col1}
- outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7, _col10, _col11, _col12
+ outputColumnNames: _col1, _col2, _col6, _col7, _col10, _col11, _col12
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col1 (type: string), _col2 (type: string), _col5 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string)
+ expressions: 20 (type: int), _col1 (type: string), _col2 (type: string), 20 (type: int), _col6 (type: string), _col7 (type: string), _col10 (type: int), _col11 (type: string), _col12 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/ppd_repeated_alias.q.out ql/src/test/results/clientpositive/ppd_repeated_alias.q.out
index ccb9c5d..d64589f 100644
--- ql/src/test/results/clientpositive/ppd_repeated_alias.q.out
+++ ql/src/test/results/clientpositive/ppd_repeated_alias.q.out
@@ -72,7 +72,7 @@ STAGE PLANS:
predicate: (_col6 = 3) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col5 (type: int), _col6 (type: int)
+ expressions: _col0 (type: int), _col5 (type: int), 3 (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -206,18 +206,17 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: foo (type: int)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- value expressions: bar (type: int)
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0} {VALUE._col0}
+ 0 {KEY.reducesinkkey0}
1 {KEY.reducesinkkey0}
- outputColumnNames: _col0, _col1, _col5
+ outputColumnNames: _col0, _col5
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col5 (type: int), _col1 (type: int)
+ expressions: _col0 (type: int), _col5 (type: int), 3 (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/ppd_udf_case.q.out ql/src/test/results/clientpositive/ppd_udf_case.q.out
index 2b4b562..ad68eb6 100644
--- ql/src/test/results/clientpositive/ppd_udf_case.q.out
+++ ql/src/test/results/clientpositive/ppd_udf_case.q.out
@@ -70,8 +70,8 @@ STAGE PLANS:
predicate: (((_col2 = '2008-04-08') and (_col8 = '2008-04-08')) and CASE (_col0) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END) (type: boolean)
Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string), _col8 (type: string), _col9 (type: string)
- outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
+ expressions: _col0 (type: string), _col1 (type: string), _col3 (type: string), _col6 (type: string), _col7 (type: string), _col9 (type: string)
+ outputColumnNames: _col0, _col1, _col3, _col4, _col5, _col7
Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
@@ -85,12 +85,12 @@ STAGE PLANS:
Map Operator Tree:
TableScan
Reduce Output Operator
- key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string), _col6 (type: string), _col7 (type: string)
+ key expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: string), '2008-04-08' (type: string), _col7 (type: string)
sort order: ++++++++
Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Select Operator
- expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), KEY.reducesinkkey2 (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: string), KEY.reducesinkkey5 (type: string), KEY.reducesinkkey6 (type: string), KEY.reducesinkkey7 (type: string)
+ expressions: KEY.reducesinkkey0 (type: string), KEY.reducesinkkey1 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey3 (type: string), KEY.reducesinkkey4 (type: string), KEY.reducesinkkey5 (type: string), '2008-04-08' (type: string), KEY.reducesinkkey7 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics: Num rows: 1 Data size: 205 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/ppd_udf_col.q.out ql/src/test/results/clientpositive/ppd_udf_col.q.out
index f0a48bb..0bc5fbf 100644
--- ql/src/test/results/clientpositive/ppd_udf_col.q.out
+++ ql/src/test/results/clientpositive/ppd_udf_col.q.out
@@ -23,14 +23,14 @@ STAGE PLANS:
predicate: (key = 100) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), rand() (type: double)
- outputColumnNames: _col0, _col2
+ expressions: rand() (type: double)
+ outputColumnNames: _col2
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (_col2 <= 0.1) (type: boolean)
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col2 (type: double)
+ expressions: '100' (type: string), _col2 (type: double)
outputColumnNames: _col0, _col1
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -76,21 +76,21 @@ STAGE PLANS:
predicate: (key = 100) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), rand() (type: double)
- outputColumnNames: _col0, _col2
+ expressions: rand() (type: double)
+ outputColumnNames: _col2
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (_col2 <= 0.1) (type: boolean)
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col2 (type: double)
- outputColumnNames: _col0, _col1
+ expressions: _col2 (type: double)
+ outputColumnNames: _col1
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (_col1 > 0.1) (type: boolean)
Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: double)
+ expressions: '100' (type: string), _col1 (type: double)
outputColumnNames: _col0, _col1
Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
Limit
@@ -135,14 +135,14 @@ STAGE PLANS:
predicate: (key = 100) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), rand() (type: double), hex(4) (type: string)
- outputColumnNames: _col0, _col2, _col3
+ expressions: rand() (type: double), '4' (type: string)
+ outputColumnNames: _col2, _col3
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (_col3 <= 3) (type: boolean)
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col2 (type: double), _col3 (type: string)
+ expressions: '100' (type: string), _col2 (type: double), _col3 (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -184,14 +184,14 @@ STAGE PLANS:
predicate: (key = 100) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), rand() (type: double), (value * 10) (type: double)
- outputColumnNames: _col0, _col2, _col3
+ expressions: rand() (type: double), (value * 10) (type: double)
+ outputColumnNames: _col2, _col3
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (_col3 <= 200.0) (type: boolean)
Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col2 (type: double), _col3 (type: double)
+ expressions: '100' (type: string), _col2 (type: double), _col3 (type: double)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -233,14 +233,14 @@ STAGE PLANS:
predicate: (key = 100) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), rand() (type: double)
- outputColumnNames: _col0, _col2
+ expressions: rand() (type: double)
+ outputColumnNames: _col2
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (_col2 <= 0.1) (type: boolean)
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col2 (type: double)
+ expressions: '100' (type: string), _col2 (type: double)
outputColumnNames: _col0, _col1
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -286,14 +286,14 @@ STAGE PLANS:
predicate: (key = 100) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), rand() (type: double)
- outputColumnNames: _col0, _col2
+ expressions: rand() (type: double)
+ outputColumnNames: _col2
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((_col2 <= 0.1) and (_col2 > 0.1)) (type: boolean)
Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col2 (type: double)
+ expressions: '100' (type: string), _col2 (type: double)
outputColumnNames: _col0, _col1
Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
Limit
@@ -338,14 +338,14 @@ STAGE PLANS:
predicate: (key = 100) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), rand() (type: double), hex(4) (type: string)
- outputColumnNames: _col0, _col2, _col3
+ expressions: rand() (type: double), '4' (type: string)
+ outputColumnNames: _col2, _col3
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (_col3 <= 3) (type: boolean)
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col2 (type: double), _col3 (type: string)
+ expressions: '100' (type: string), _col2 (type: double), _col3 (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 9 Data size: 901 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -387,14 +387,14 @@ STAGE PLANS:
predicate: (key = 100) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), rand() (type: double), (value * 10) (type: double)
- outputColumnNames: _col0, _col2, _col3
+ expressions: rand() (type: double), (value * 10) (type: double)
+ outputColumnNames: _col2, _col3
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (_col3 <= 200.0) (type: boolean)
Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col2 (type: double), _col3 (type: double)
+ expressions: '100' (type: string), _col2 (type: double), _col3 (type: double)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 4 Data size: 801 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/ppd_union_view.q.out ql/src/test/results/clientpositive/ppd_union_view.q.out
index c2bc43f..7b0c528 100644
--- ql/src/test/results/clientpositive/ppd_union_view.q.out
+++ ql/src/test/results/clientpositive/ppd_union_view.q.out
@@ -465,12 +465,12 @@ STAGE PLANS:
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {VALUE._col0} {KEY.reducesinkkey1}
+ 0 {VALUE._col0}
1 {VALUE._col0}
- outputColumnNames: _col1, _col2, _col5
+ outputColumnNames: _col1, _col5
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col5 (type: string), _col1 (type: string), _col2 (type: string)
+ expressions: _col5 (type: string), _col1 (type: string), '2011-10-15' (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/query_result_fileformat.q.out ql/src/test/results/clientpositive/query_result_fileformat.q.out
index 2b1fed8..014c7a1 100644
--- ql/src/test/results/clientpositive/query_result_fileformat.q.out
+++ ql/src/test/results/clientpositive/query_result_fileformat.q.out
@@ -53,7 +53,7 @@ STAGE PLANS:
predicate: (key = 'key1') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: 'key1' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -124,7 +124,7 @@ STAGE PLANS:
predicate: (key = 'key1') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: 'key1' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/quotedid_basic.q.out ql/src/test/results/clientpositive/quotedid_basic.q.out
index 9478b99..0f86eed 100644
--- ql/src/test/results/clientpositive/quotedid_basic.q.out
+++ ql/src/test/results/clientpositive/quotedid_basic.q.out
@@ -76,7 +76,7 @@ STAGE PLANS:
predicate: (!@#$%^&*()_q = '1') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string)
+ expressions: x+1 (type: string), y&y (type: string), '1' (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -112,7 +112,7 @@ STAGE PLANS:
predicate: (!@#$%^&*()_q = '1') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string)
+ expressions: x+1 (type: string), y&y (type: string), '1' (type: string)
outputColumnNames: x+1, y&y, !@#$%^&*()_q
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
@@ -171,7 +171,7 @@ STAGE PLANS:
predicate: (!@#$%^&*()_q = '1') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string)
+ expressions: x+1 (type: string), y&y (type: string), '1' (type: string)
outputColumnNames: x+1, y&y, !@#$%^&*()_q
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
@@ -254,7 +254,7 @@ STAGE PLANS:
predicate: (!@#$%^&*()_q = '1') (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string)
+ expressions: x+1 (type: string), y&y (type: string), '1' (type: string)
outputColumnNames: x+1, y&y, !@#$%^&*()_q
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Group By Operator
diff --git ql/src/test/results/clientpositive/quotedid_partition.q.out ql/src/test/results/clientpositive/quotedid_partition.q.out
index 84bd15e..014d93e 100644
--- ql/src/test/results/clientpositive/quotedid_partition.q.out
+++ ql/src/test/results/clientpositive/quotedid_partition.q.out
@@ -45,7 +45,7 @@ STAGE PLANS:
predicate: (x+1 = '10') (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: x+1 (type: string), y&y (type: string), !@#$%^&*()_q (type: string)
+ expressions: '10' (type: string), y&y (type: string), !@#$%^&*()_q (type: string)
outputColumnNames: x+1, y&y, !@#$%^&*()_q
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Group By Operator
diff --git ql/src/test/results/clientpositive/regex_col.q.out ql/src/test/results/clientpositive/regex_col.q.out
index 97393fc..b2166af 100644
--- ql/src/test/results/clientpositive/regex_col.q.out
+++ ql/src/test/results/clientpositive/regex_col.q.out
@@ -184,9 +184,9 @@ STAGE PLANS:
predicate: ((key = 103) and key is not null) (type: boolean)
Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string), hr (type: string), ds (type: string)
+ key expressions: '103' (type: string), hr (type: string), ds (type: string)
sort order: +++
- Map-reduce partition columns: key (type: string), hr (type: string), ds (type: string)
+ Map-reduce partition columns: hr (type: string), ds (type: string)
Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: a
@@ -195,9 +195,9 @@ STAGE PLANS:
predicate: ((key = 103) and key is not null) (type: boolean)
Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: string), hr (type: string), ds (type: string)
+ key expressions: '103' (type: string), hr (type: string), ds (type: string)
sort order: +++
- Map-reduce partition columns: key (type: string), hr (type: string), ds (type: string)
+ Map-reduce partition columns: hr (type: string), ds (type: string)
Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Join Operator
diff --git ql/src/test/results/clientpositive/regexp_extract.q.out ql/src/test/results/clientpositive/regexp_extract.q.out
index aa89d5d..fb7bc5a 100644
--- ql/src/test/results/clientpositive/regexp_extract.q.out
+++ ql/src/test/results/clientpositive/regexp_extract.q.out
@@ -98,7 +98,7 @@ STAGE PLANS:
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Select Operator
- expressions: key (type: string), value (type: string), (1 + 2) (type: int), (3 + 4) (type: int)
+ expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Transform Operator
@@ -413,7 +413,7 @@ STAGE PLANS:
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Select Operator
- expressions: key (type: string), value (type: string), (1 + 2) (type: int), (3 + 4) (type: int)
+ expressions: key (type: string), value (type: string), 3 (type: int), 7 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Transform Operator
diff --git ql/src/test/results/clientpositive/sample8.q.out ql/src/test/results/clientpositive/sample8.q.out
index 1594893..e0c0f9e 100644
--- ql/src/test/results/clientpositive/sample8.q.out
+++ ql/src/test/results/clientpositive/sample8.q.out
@@ -330,7 +330,7 @@ STAGE PLANS:
predicate: ((((_col6 = _col0) and (_col7 = _col1)) and (_col2 = '2008-04-08')) and (_col3 = '11')) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string)
+ expressions: _col0 (type: string), _col1 (type: string), '2008-04-08' (type: string), '11' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/select_dummy_source.q.out ql/src/test/results/clientpositive/select_dummy_source.q.out
index 2742d56..651ce1d 100644
--- ql/src/test/results/clientpositive/select_dummy_source.q.out
+++ ql/src/test/results/clientpositive/select_dummy_source.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: (1 + 1) (type: int)
+ expressions: 2 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
File Output Operator
@@ -193,7 +193,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: (1 + 1) (type: int)
+ expressions: 2 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -277,7 +277,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: (2 + 3) (type: int), (1 + 2) (type: int)
+ expressions: 5 (type: int), (1 + 2) (type: int)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
File Output Operator
diff --git ql/src/test/results/clientpositive/select_unquote_and.q.out ql/src/test/results/clientpositive/select_unquote_and.q.out
index 47fed47..127fcb6 100644
--- ql/src/test/results/clientpositive/select_unquote_and.q.out
+++ ql/src/test/results/clientpositive/select_unquote_and.q.out
@@ -58,7 +58,7 @@ STAGE PLANS:
alias: npe_test
Statistics: Num rows: 498 Data size: 5290 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((ds > ((2012 - 11) - 31)) and (ds < ((2012 - 12) - 15))) (type: boolean)
+ predicate: ((ds > 1970) and (ds < 1985)) (type: boolean)
Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string), ds (type: string)
diff --git ql/src/test/results/clientpositive/select_unquote_not.q.out ql/src/test/results/clientpositive/select_unquote_not.q.out
index ed35c10..0016c69 100644
--- ql/src/test/results/clientpositive/select_unquote_not.q.out
+++ ql/src/test/results/clientpositive/select_unquote_not.q.out
@@ -58,7 +58,7 @@ STAGE PLANS:
alias: npe_test
Statistics: Num rows: 498 Data size: 5290 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (not (ds < ((2012 - 11) - 31))) (type: boolean)
+ predicate: (not (ds < 1970)) (type: boolean)
Statistics: Num rows: 332 Data size: 3526 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string), ds (type: string)
diff --git ql/src/test/results/clientpositive/select_unquote_or.q.out ql/src/test/results/clientpositive/select_unquote_or.q.out
index 0425cca..537b9bb 100644
--- ql/src/test/results/clientpositive/select_unquote_or.q.out
+++ ql/src/test/results/clientpositive/select_unquote_or.q.out
@@ -58,7 +58,7 @@ STAGE PLANS:
alias: npe_test
Statistics: Num rows: 498 Data size: 5290 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((ds > ((2012 - 11) - 31)) or (ds < ((2012 - 12) - 15))) (type: boolean)
+ predicate: ((ds > 1970) or (ds < 1985)) (type: boolean)
Statistics: Num rows: 332 Data size: 3526 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string), ds (type: string)
diff --git ql/src/test/results/clientpositive/set_processor_namespaces.q.out ql/src/test/results/clientpositive/set_processor_namespaces.q.out
index df229ce..6d301d8 100644
--- ql/src/test/results/clientpositive/set_processor_namespaces.q.out
+++ ql/src/test/results/clientpositive/set_processor_namespaces.q.out
@@ -22,7 +22,7 @@ STAGE PLANS:
predicate: (key = 5) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '5' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/set_variable_sub.q.out ql/src/test/results/clientpositive/set_variable_sub.q.out
index bda25d7..debd600 100644
--- ql/src/test/results/clientpositive/set_variable_sub.q.out
+++ ql/src/test/results/clientpositive/set_variable_sub.q.out
@@ -17,7 +17,7 @@ STAGE PLANS:
predicate: (key = 'value1') (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: 'value1' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -53,7 +53,7 @@ STAGE PLANS:
predicate: (key = 'value1') (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: 'value1' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -89,7 +89,7 @@ STAGE PLANS:
predicate: (key = '1') (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '1' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/smb_mapjoin_18.q.out ql/src/test/results/clientpositive/smb_mapjoin_18.q.out
index dbda142..8f68025 100644
--- ql/src/test/results/clientpositive/smb_mapjoin_18.q.out
+++ ql/src/test/results/clientpositive/smb_mapjoin_18.q.out
@@ -234,7 +234,7 @@ STAGE PLANS:
predicate: (key = 238) (type: boolean)
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: 238 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/smb_mapjoin_25.q.out ql/src/test/results/clientpositive/smb_mapjoin_25.q.out
index 43daa7a..bd65898 100644
--- ql/src/test/results/clientpositive/smb_mapjoin_25.q.out
+++ ql/src/test/results/clientpositive/smb_mapjoin_25.q.out
@@ -66,9 +66,8 @@ STAGE PLANS:
predicate: (key is not null and (key = 5)) (type: boolean)
Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: int)
+ key expressions: 5 (type: int)
sort order: +
- Map-reduce partition columns: key (type: int)
Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: a
@@ -77,21 +76,19 @@ STAGE PLANS:
predicate: (key is not null and (key = 5)) (type: boolean)
Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: int)
+ key expressions: 5 (type: int)
sort order: +
- Map-reduce partition columns: key (type: int)
Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0}
+ 0
1
- outputColumnNames: _col0
Statistics: Num rows: 14 Data size: 57 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: int)
+ expressions: 5 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 14 Data size: 57 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -129,7 +126,7 @@ STAGE PLANS:
predicate: (_col1 = 5) (type: boolean)
Statistics: Num rows: 8 Data size: 33 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: int), _col1 (type: int)
+ expressions: _col0 (type: int), 5 (type: int)
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 33 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -150,9 +147,8 @@ STAGE PLANS:
predicate: (key is not null and (key = 5)) (type: boolean)
Statistics: Num rows: 14 Data size: 56 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: int)
+ key expressions: 5 (type: int)
sort order: +
- Map-reduce partition columns: key (type: int)
Statistics: Num rows: 14 Data size: 56 Basic stats: COMPLETE Column stats: NONE
TableScan
alias: c
@@ -161,21 +157,19 @@ STAGE PLANS:
predicate: (key is not null and (key = 5)) (type: boolean)
Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
- key expressions: key (type: int)
+ key expressions: 5 (type: int)
sort order: +
- Map-reduce partition columns: key (type: int)
Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Join Operator
condition map:
Inner Join 0 to 1
condition expressions:
- 0 {KEY.reducesinkkey0}
+ 0
1
- outputColumnNames: _col0
Statistics: Num rows: 15 Data size: 61 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: _col0 (type: int)
+ expressions: 5 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 15 Data size: 61 Basic stats: COMPLETE Column stats: NONE
File Output Operator
@@ -230,9 +224,8 @@ STAGE PLANS:
keys:
0 key (type: int)
1 key (type: int)
- outputColumnNames: _col0
Select Operator
- expressions: _col0 (type: int)
+ expressions: 5 (type: int)
outputColumnNames: _col0
Reduce Output Operator
key expressions: _col0 (type: int)
@@ -253,9 +246,8 @@ STAGE PLANS:
keys:
0 key (type: int)
1 key (type: int)
- outputColumnNames: _col0
Select Operator
- expressions: _col0 (type: int)
+ expressions: 5 (type: int)
outputColumnNames: _col0
Reduce Output Operator
key expressions: _col0 (type: int)
@@ -272,7 +264,7 @@ STAGE PLANS:
Filter Operator
predicate: (_col1 = 5) (type: boolean)
Select Operator
- expressions: _col0 (type: int), _col1 (type: int)
+ expressions: _col0 (type: int), 5 (type: int)
outputColumnNames: _col0, _col1
File Output Operator
compressed: false
diff --git ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out
index 267c285..ecc90c1 100644
--- ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out
+++ ql/src/test/results/clientpositive/stats_empty_dyn_part.q.out
@@ -33,7 +33,7 @@ STAGE PLANS:
predicate: (key = 'no_such_value') (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: 'no_such_value' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
diff --git ql/src/test/results/clientpositive/subq_where_serialization.q.out ql/src/test/results/clientpositive/subq_where_serialization.q.out
index b0d9536..c5df3ad 100644
--- ql/src/test/results/clientpositive/subq_where_serialization.q.out
+++ ql/src/test/results/clientpositive/subq_where_serialization.q.out
@@ -93,8 +93,6 @@ STAGE PLANS:
0 key (type: string)
1 _col0 (type: string)
outputColumnNames: _col0
- Filter Operator
- predicate: (1 = 1) (type: boolean)
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
@@ -136,16 +134,13 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/subquery_exists.q.out ql/src/test/results/clientpositive/subquery_exists.q.out
index 9313da9..1dd9cf4 100644
--- ql/src/test/results/clientpositive/subquery_exists.q.out
+++ ql/src/test/results/clientpositive/subquery_exists.q.out
@@ -66,16 +66,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/subquery_exists_having.q.out ql/src/test/results/clientpositive/subquery_exists_having.q.out
index b5b73da..003ae75 100644
--- ql/src/test/results/clientpositive/subquery_exists_having.q.out
+++ ql/src/test/results/clientpositive/subquery_exists_having.q.out
@@ -104,16 +104,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -244,9 +241,6 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
@@ -268,9 +262,6 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
diff --git ql/src/test/results/clientpositive/subquery_in.q.out ql/src/test/results/clientpositive/subquery_in.q.out
index d35e2ad..6ceeb16 100644
--- ql/src/test/results/clientpositive/subquery_in.q.out
+++ ql/src/test/results/clientpositive/subquery_in.q.out
@@ -157,16 +157,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -269,16 +266,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -447,16 +441,13 @@ STAGE PLANS:
1
outputColumnNames: _col1, _col5
Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -618,16 +609,13 @@ STAGE PLANS:
1
outputColumnNames: _col1, _col2, _col5
Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 2 Data size: 465 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 2 Data size: 465 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 465 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -763,16 +751,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -947,24 +932,24 @@ STAGE PLANS:
value expressions: _col0 (type: int), _col3 (type: int)
TableScan
alias: lineitem
- Statistics: Num rows: 116 Data size: 12099 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1728 Data size: 12099 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((l_shipmode = 'AIR') and l_orderkey is not null) (type: boolean)
- Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: l_orderkey (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
Reduce Operator Tree:
Join Operator
condition map:
diff --git ql/src/test/results/clientpositive/subquery_in_having.q.out ql/src/test/results/clientpositive/subquery_in_having.q.out
index 2f0a015..8eae55f 100644
--- ql/src/test/results/clientpositive/subquery_in_having.q.out
+++ ql/src/test/results/clientpositive/subquery_in_having.q.out
@@ -136,16 +136,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 15 Data size: 1546 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -343,16 +340,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -510,16 +504,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: double)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -677,9 +668,6 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: double)
outputColumnNames: _col0, _col1
@@ -714,9 +702,6 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: double)
outputColumnNames: _col0, _col1
@@ -841,19 +826,16 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count()
keys: _col0 (type: string), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -869,7 +851,7 @@ STAGE PLANS:
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: bigint)
Reduce Operator Tree:
Group By Operator
@@ -877,10 +859,10 @@ STAGE PLANS:
keys: KEY._col0 (type: string), KEY._col1 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 4 Data size: 826 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: _col2 is not null (type: boolean)
- Statistics: Num rows: 4 Data size: 826 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -896,7 +878,7 @@ STAGE PLANS:
key expressions: _col2 (type: bigint)
sort order: +
Map-reduce partition columns: _col2 (type: bigint)
- Statistics: Num rows: 4 Data size: 826 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string), _col1 (type: string)
TableScan
Reduce Output Operator
@@ -913,16 +895,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 440 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 4 Data size: 440 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 440 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1109,8 +1088,6 @@ STAGE PLANS:
0 _col2 (type: bigint)
1 _col0 (type: bigint)
outputColumnNames: _col0, _col1, _col2
- Filter Operator
- predicate: (1 = 1) (type: boolean)
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
outputColumnNames: _col0, _col1, _col2
@@ -1131,7 +1108,7 @@ STAGE PLANS:
key expressions: _col2 (type: bigint)
sort order: +
Map-reduce partition columns: _col2 (type: bigint)
- Statistics: Num rows: 4 Data size: 826 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string), _col1 (type: string)
TableScan
Reduce Output Operator
@@ -1148,16 +1125,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 440 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 4 Data size: 440 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 440 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 9 Data size: 991 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1214,24 +1188,21 @@ STAGE PLANS:
1 _col0 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count()
keys: _col0 (type: string), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: bigint)
Local Work:
Map Reduce Local Work
@@ -1241,10 +1212,10 @@ STAGE PLANS:
keys: KEY._col0 (type: string), KEY._col1 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 4 Data size: 826 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: _col2 is not null (type: boolean)
- Statistics: Num rows: 4 Data size: 826 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -1354,8 +1325,6 @@ STAGE PLANS:
0 _col1 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2
- Filter Operator
- predicate: (1 = 1) (type: boolean)
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double)
outputColumnNames: _col0, _col1, _col2
@@ -1393,16 +1362,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 8 Data size: 1861 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: double)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1861 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1861 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/subquery_multiinsert.q.out ql/src/test/results/clientpositive/subquery_multiinsert.q.out
index 6f44cb0..1f1ec9d 100644
--- ql/src/test/results/clientpositive/subquery_multiinsert.q.out
+++ ql/src/test/results/clientpositive/subquery_multiinsert.q.out
@@ -183,12 +183,12 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col4
Statistics: Num rows: 34 Data size: 7032 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col4 is null) (type: boolean)
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col4 is null (type: boolean)
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
table:
@@ -203,16 +203,16 @@ STAGE PLANS:
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -270,16 +270,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -678,16 +675,13 @@ STAGE PLANS:
1 _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -751,16 +745,16 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col4
Statistics: Num rows: 34 Data size: 7032 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col4 is null) (type: boolean)
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col4 is null (type: boolean)
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: string)
Local Work:
Map Reduce Local Work
@@ -768,10 +762,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/subquery_notexists.q.out ql/src/test/results/clientpositive/subquery_notexists.q.out
index 6c76dd2..2a0dfa6 100644
--- ql/src/test/results/clientpositive/subquery_notexists.q.out
+++ ql/src/test/results/clientpositive/subquery_notexists.q.out
@@ -59,15 +59,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col6
Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col6 is null) (type: boolean)
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col6 is null (type: boolean)
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -312,15 +312,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col5
Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col5 is null) (type: boolean)
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col5 is null (type: boolean)
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/subquery_notexists_having.q.out ql/src/test/results/clientpositive/subquery_notexists_having.q.out
index a625333..e2c5e43 100644
--- ql/src/test/results/clientpositive/subquery_notexists_having.q.out
+++ ql/src/test/results/clientpositive/subquery_notexists_having.q.out
@@ -93,15 +93,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col4
Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col4 is null) (type: boolean)
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col4 is null (type: boolean)
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -237,15 +237,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col3
Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col3 is null) (type: boolean)
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col3 is null (type: boolean)
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 7 Data size: 1443 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3093 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/subquery_notin.q.out ql/src/test/results/clientpositive/subquery_notin.q.out
index 8cf191e..22cb1ac 100644
--- ql/src/test/results/clientpositive/subquery_notin.q.out
+++ ql/src/test/results/clientpositive/subquery_notin.q.out
@@ -228,15 +228,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col4
Statistics: Num rows: 34 Data size: 7032 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col4 is null) (type: boolean)
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col4 is null (type: boolean)
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 1654 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 3516 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -469,15 +469,15 @@ STAGE PLANS:
outputColumnNames: _col1, _col2, _col5, _col11
Statistics: Num rows: 17 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col11 is null) (type: boolean)
- Statistics: Num rows: 4 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col11 is null (type: boolean)
+ Statistics: Num rows: 8 Data size: 1806 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 4 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1806 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1806 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -743,15 +743,15 @@ STAGE PLANS:
outputColumnNames: _col1, _col5, _col11
Statistics: Num rows: 36 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col11 is null) (type: boolean)
- Statistics: Num rows: 9 Data size: 959 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col11 is null (type: boolean)
+ Statistics: Num rows: 18 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 9 Data size: 959 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 18 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 9 Data size: 959 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 18 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1040,15 +1040,15 @@ STAGE PLANS:
outputColumnNames: _col1, _col2, _col5, _col11
Statistics: Num rows: 17 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col11 is null) (type: boolean)
- Statistics: Num rows: 4 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col11 is null (type: boolean)
+ Statistics: Num rows: 8 Data size: 1806 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 4 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1806 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1806 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -1567,15 +1567,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
Statistics: Num rows: 22 Data size: 2302 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col1 is null) (type: boolean)
- Statistics: Num rows: 5 Data size: 523 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col1 is null (type: boolean)
+ Statistics: Num rows: 11 Data size: 1151 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 5 Data size: 523 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 1151 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 5 Data size: 523 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 11 Data size: 1151 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/subquery_notin_having.q.out ql/src/test/results/clientpositive/subquery_notin_having.q.out
index 4f9fb13..55b6b71 100644
--- ql/src/test/results/clientpositive/subquery_notin_having.q.out
+++ ql/src/test/results/clientpositive/subquery_notin_having.q.out
@@ -176,15 +176,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 69 Data size: 7032 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col2 is null) (type: boolean)
- Statistics: Num rows: 17 Data size: 1732 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col2 is null (type: boolean)
+ Statistics: Num rows: 34 Data size: 3465 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 17 Data size: 1732 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 34 Data size: 3465 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 17 Data size: 1732 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 34 Data size: 3465 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -354,15 +354,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col4
Statistics: Num rows: 34 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col4 is null) (type: boolean)
- Statistics: Num rows: 8 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col4 is null (type: boolean)
+ Statistics: Num rows: 17 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: double)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -646,15 +646,15 @@ STAGE PLANS:
outputColumnNames: _col0, _col1, _col3
Statistics: Num rows: 34 Data size: 3839 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col3 is null) (type: boolean)
- Statistics: Num rows: 8 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ predicate: _col3 is null (type: boolean)
+ Statistics: Num rows: 17 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: double)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 903 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 17 Data size: 1919 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -670,7 +670,7 @@ STAGE PLANS:
predicate: p_mfgr is null (type: boolean)
Statistics: Num rows: 14 Data size: 1531 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: p_mfgr (type: string), p_retailprice (type: double)
+ expressions: null (type: void), p_retailprice (type: double)
outputColumnNames: p_mfgr, p_retailprice
Statistics: Num rows: 14 Data size: 1531 Basic stats: COMPLETE Column stats: NONE
Group By Operator
diff --git ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
index c835562..548d214 100644
--- ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
+++ ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
@@ -119,16 +119,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 3 Data size: 661 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 661 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 661 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -192,16 +189,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -341,9 +335,6 @@ STAGE PLANS:
1
outputColumnNames: _col1, _col2, _col5
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1, _col2
@@ -488,9 +479,6 @@ STAGE PLANS:
1
outputColumnNames: _col1, _col2, _col5
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1, _col2
@@ -609,16 +597,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -718,16 +703,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: bigint)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 3 Data size: 660 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1542 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -877,7 +859,7 @@ STAGE PLANS:
outputColumnNames: _col1, _col2, _col5, _col11
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: ((1 = 1) and _col11 is null) (type: boolean)
+ predicate: _col11 is null (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int)
diff --git ql/src/test/results/clientpositive/subquery_views.q.out ql/src/test/results/clientpositive/subquery_views.q.out
index 6c82a59..07370f6 100644
--- ql/src/test/results/clientpositive/subquery_views.q.out
+++ ql/src/test/results/clientpositive/subquery_views.q.out
@@ -65,6 +65,325 @@ POSTHOOK: Input: default@src
POSTHOOK: Output: default@cv2
Warning: Shuffle Join JOIN[42][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-5:MAPRED' is a cross product
Warning: Shuffle Join JOIN[18][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
+PREHOOK: query: explain
+select *
+from cv2 where cv2.key in (select key from cv2 c where c.key < '11')
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select *
+from cv2 where cv2.key in (select key from cv2 c where c.key < '11')
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-4 is a root stage
+ Stage-5 depends on stages: Stage-4
+ Stage-6 depends on stages: Stage-5
+ Stage-3 depends on stages: Stage-2, Stage-6
+ Stage-9 is a root stage
+ Stage-1 depends on stages: Stage-9
+ Stage-2 depends on stages: Stage-1
+ Stage-0 depends on stages: Stage-3
+
+STAGE PLANS:
+ Stage: Stage-4
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: a
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((value > 'val_11') and ((key is null or value is null) or key is null)) (type: boolean)
+ Statistics: Num rows: 12 Data size: 2404 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ Statistics: Num rows: 12 Data size: 2404 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count()
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col0 = 0) (type: boolean)
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: bigint)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: bigint)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-5
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ TableScan
+ alias: b
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((key < '11') and key is not null) (type: boolean)
+ Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+ value expressions: key (type: string), value (type: string)
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 5 Data size: 1102 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-6
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string), _col0 (type: string)
+ sort order: +++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col0 (type: string)
+ Statistics: Num rows: 5 Data size: 1102 Basic stats: COMPLETE Column stats: NONE
+ TableScan
+ alias: a
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (((value > 'val_11') and (key < '11')) and key is not null) (type: boolean)
+ Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), value (type: string), key (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ sort order: +++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ Statistics: Num rows: 2 Data size: 400 Basic stats: COMPLETE Column stats: NONE
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ condition expressions:
+ 0 {KEY.reducesinkkey0}
+ 1 {KEY.reducesinkkey0}
+ outputColumnNames: _col0, _col4
+ Statistics: Num rows: 5 Data size: 1212 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: _col4 is null (type: boolean)
+ Statistics: Num rows: 2 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: string)
+ outputColumnNames: _col0
+ Statistics: Num rows: 2 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: string)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 2 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-3
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 8 Data size: 1711 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: string)
+ TableScan
+ Reduce Output Operator
+ key expressions: _col0 (type: string)
+ sort order: +
+ Map-reduce partition columns: _col0 (type: string)
+ Statistics: Num rows: 2 Data size: 484 Basic stats: COMPLETE Column stats: NONE
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ condition expressions:
+ 0 {KEY.reducesinkkey0} {VALUE._col0}
+ 1
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 1882 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: string), _col1 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 1882 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 8 Data size: 1882 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-9
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: a
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((value > 'val_11') and ((key is null or value is null) or key is null)) (type: boolean)
+ Statistics: Num rows: 12 Data size: 2404 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ Statistics: Num rows: 12 Data size: 2404 Basic stats: COMPLETE Column stats: NONE
+ Group By Operator
+ aggregations: count()
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col0 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: (_col0 = 0) (type: boolean)
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: bigint)
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Group By Operator
+ keys: _col0 (type: bigint)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: b
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 15 Data size: 3006 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 15 Data size: 3006 Basic stats: COMPLETE Column stats: NONE
+ value expressions: key (type: string), value (type: string)
+ TableScan
+ Reduce Output Operator
+ sort order:
+ Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Semi Join 0 to 1
+ condition expressions:
+ 0 {VALUE._col0} {VALUE._col1}
+ 1
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-2
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string), _col0 (type: string)
+ sort order: +++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col0 (type: string)
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
+ TableScan
+ alias: a
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((value > 'val_11') and key is not null) (type: boolean)
+ Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: key (type: string), value (type: string), key (type: string)
+ outputColumnNames: _col0, _col1, _col2
+ Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ sort order: +++
+ Map-reduce partition columns: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ Statistics: Num rows: 5 Data size: 1002 Basic stats: COMPLETE Column stats: NONE
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Left Outer Join0 to 1
+ condition expressions:
+ 0 {KEY.reducesinkkey0} {KEY.reducesinkkey1}
+ 1 {KEY.reducesinkkey0}
+ outputColumnNames: _col0, _col1, _col4
+ Statistics: Num rows: 17 Data size: 3636 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: _col4 is null (type: boolean)
+ Statistics: Num rows: 8 Data size: 1711 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: _col0 (type: string), _col1 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 8 Data size: 1711 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
+Warning: Shuffle Join JOIN[42][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-5:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[18][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
PREHOOK: query: select *
from cv2 where cv2.key in (select key from cv2 c where c.key < '11')
PREHOOK: type: QUERY
diff --git ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out
index 5de4348..f243b0a 100644
--- ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out
+++ ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out
@@ -245,23 +245,20 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Group By Operator
@@ -360,23 +357,20 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Group By Operator
@@ -498,23 +492,20 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Group By Operator
diff --git ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
index 0105a7e..d384538 100644
--- ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
+++ ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out
@@ -208,23 +208,20 @@ STAGE PLANS:
1 key (type: string)
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
@@ -322,23 +319,20 @@ STAGE PLANS:
1
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
@@ -437,23 +431,20 @@ STAGE PLANS:
1 key (type: string)
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
diff --git ql/src/test/results/clientpositive/tez/insert1.q.out ql/src/test/results/clientpositive/tez/insert1.q.out
index b8f7e96..1779d37 100644
--- ql/src/test/results/clientpositive/tez/insert1.q.out
+++ ql/src/test/results/clientpositive/tez/insert1.q.out
@@ -47,10 +47,10 @@ STAGE PLANS:
alias: a
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key = (- 1)) (type: boolean)
+ predicate: (key = -1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: -1 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -99,10 +99,10 @@ STAGE PLANS:
alias: a
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key = (- 1)) (type: boolean)
+ predicate: (key = -1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: -1 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -164,10 +164,10 @@ STAGE PLANS:
alias: a
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key = (- 1)) (type: boolean)
+ predicate: (key = -1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: -1 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
@@ -216,10 +216,10 @@ STAGE PLANS:
alias: a
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Filter Operator
- predicate: (key = (- 1)) (type: boolean)
+ predicate: (key = -1) (type: boolean)
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
Select Operator
- expressions: key (type: int), value (type: string)
+ expressions: -1 (type: int), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/tez/subquery_exists.q.out ql/src/test/results/clientpositive/tez/subquery_exists.q.out
index 4944b9c..d90d1dc 100644
--- ql/src/test/results/clientpositive/tez/subquery_exists.q.out
+++ ql/src/test/results/clientpositive/tez/subquery_exists.q.out
@@ -74,16 +74,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git ql/src/test/results/clientpositive/tez/subquery_in.q.out ql/src/test/results/clientpositive/tez/subquery_in.q.out
index 99680eb..2385910 100644
--- ql/src/test/results/clientpositive/tez/subquery_in.q.out
+++ ql/src/test/results/clientpositive/tez/subquery_in.q.out
@@ -165,16 +165,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 1653 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 3306 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -285,16 +282,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -451,16 +445,13 @@ STAGE PLANS:
1
outputColumnNames: _col1, _col5
Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 8 Data size: 872 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 1744 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -560,16 +551,13 @@ STAGE PLANS:
1
outputColumnNames: _col1, _col2, _col5
Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 2 Data size: 465 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col2 (type: string), _col1 (type: string), _col5 (type: int)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 2 Data size: 465 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 2 Data size: 465 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 4 Data size: 930 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -732,16 +720,13 @@ STAGE PLANS:
1
outputColumnNames: _col0, _col1
Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: (1 = 1) (type: boolean)
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 4 Data size: 881 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 8 Data size: 1763 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -894,24 +879,24 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: lineitem
- Statistics: Num rows: 116 Data size: 12099 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1728 Data size: 12099 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((l_shipmode = 'AIR') and l_orderkey is not null) (type: boolean)
- Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: l_orderkey (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 29 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 432 Data size: 3024 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Join Operator
diff --git ql/src/test/results/clientpositive/tez/transform_ppr1.q.out ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
index df08356..0de3254 100644
--- ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
+++ ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
@@ -135,7 +135,7 @@ STAGE PLANS:
Map-reduce partition columns: _col1 (type: string)
Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
tag: -1
- value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ value expressions: '2008-04-08' (type: string), _col1 (type: string), _col2 (type: string)
auto parallelism: true
Path -> Alias:
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/transform_ppr1.q.out ql/src/test/results/clientpositive/transform_ppr1.q.out
index 7752717..6f908fa 100644
--- ql/src/test/results/clientpositive/transform_ppr1.q.out
+++ ql/src/test/results/clientpositive/transform_ppr1.q.out
@@ -130,7 +130,7 @@ STAGE PLANS:
Map-reduce partition columns: _col1 (type: string)
Statistics: Num rows: 19 Data size: 3807 Basic stats: COMPLETE Column stats: NONE
tag: -1
- value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
+ value expressions: '2008-04-08' (type: string), _col1 (type: string), _col2 (type: string)
auto parallelism: false
Path -> Alias:
#### A masked pattern was here ####
diff --git ql/src/test/results/clientpositive/type_cast_1.q.out ql/src/test/results/clientpositive/type_cast_1.q.out
index d337727..85930ea 100644
--- ql/src/test/results/clientpositive/type_cast_1.q.out
+++ ql/src/test/results/clientpositive/type_cast_1.q.out
@@ -16,7 +16,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: (if(false, 1, UDFToShort(2)) + 3) (type: int)
+ expressions: 5 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
diff --git ql/src/test/results/clientpositive/type_widening.q.out ql/src/test/results/clientpositive/type_widening.q.out
index f1901f4..e48473f 100644
--- ql/src/test/results/clientpositive/type_widening.q.out
+++ ql/src/test/results/clientpositive/type_widening.q.out
@@ -16,7 +16,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: COALESCE(0,9223372036854775807) (type: bigint)
+ expressions: 0 (type: bigint)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
diff --git ql/src/test/results/clientpositive/udf1.q.out ql/src/test/results/clientpositive/udf1.q.out
index 6ce78a2..8f57cd7 100644
--- ql/src/test/results/clientpositive/udf1.q.out
+++ ql/src/test/results/clientpositive/udf1.q.out
@@ -52,7 +52,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: ('a' like '%a%') (type: boolean), ('b' like '%a%') (type: boolean), ('ab' like '%a%') (type: boolean), ('ab' like '%a_') (type: boolean), ('%_' like '\%\_') (type: boolean), ('ab' like '\%\_') (type: boolean), ('ab' like '_a%') (type: boolean), ('ab' like 'a') (type: boolean), ('' rlike '.*') (type: boolean), ('a' rlike '[ab]') (type: boolean), ('' rlike '[ab]') (type: boolean), ('hadoop' rlike '[a-z]*') (type: boolean), ('hadoop' rlike 'o*') (type: boolean), regexp_replace('abc', 'b', 'c') (type: string), regexp_replace('abc', 'z', 'a') (type: string), regexp_replace('abbbb', 'bb', 'b') (type: string), regexp_replace('hadoop', '(.)[a-z]*', '$1ive') (type: string), regexp_replace('hadoopAAA', 'A.*', '') (type: string), regexp_replace('abc', '', 'A') (type: string), ('abc' rlike '') (type: boolean)
+ expressions: true (type: boolean), false (type: boolean), true (type: boolean), true (type: boolean), true (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), true (type: boolean), true (type: boolean), false (type: boolean), true (type: boolean), true (type: boolean), 'acc' (type: string), 'abc' (type: string), 'abb' (type: string), 'hive' (type: string), 'hadoop' (type: string), 'AaAbAcA' (type: string), false (type: boolean)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf4.q.out ql/src/test/results/clientpositive/udf4.q.out
index d1bb2ba..81e5470 100644
--- ql/src/test/results/clientpositive/udf4.q.out
+++ ql/src/test/results/clientpositive/udf4.q.out
@@ -76,7 +76,7 @@ STAGE PLANS:
alias: dest1
Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: round(1.0) (type: double), round(1.5) (type: double), round((- 1.5)) (type: double), floor(1.0) (type: bigint), floor(1.5) (type: bigint), floor((- 1.5)) (type: bigint), sqrt(1.0) (type: double), sqrt((- 1.0)) (type: double), sqrt(0.0) (type: double), ceil(1.0) (type: bigint), ceil(1.5) (type: bigint), ceil((- 1.5)) (type: bigint), ceil(1.0) (type: bigint), rand(3) (type: double), 3 (type: int), (- 3) (type: int), (1 + 2) (type: int), (1 + (- 2)) (type: int), (~ 1) (type: int), (~ UDFToByte(1)) (type: tinyint), (~ UDFToShort(1)) (type: smallint), (~ UDFToLong(1)) (type: bigint), (UDFToByte(1) & UDFToByte(2)) (type: tinyint), (UDFToShort(1) & UDFToShort(2)) (type: smallint), (1 & 2) (type: int), (UDFToLong(1) & UDFToLong(2)) (type: bigint), (UDFToByte(1) | UDFToByte(2)) (type: tinyint), (UDFToShort(1) | UDFToShort(2)) (type: smallint), (1 | 2) (type: int), (UDFToLong(1) | UDFToLong(2)) (type: bigint), (UDFToByte(1) ^ UDFToByte(3)) (type: tinyint), (UDFToShort(1) ^ UDFToShort(3)) (type: smallint), (1 ^ 3) (type: int), (UDFToLong(1) ^ UDFToLong(3)) (type: bigint)
+ expressions: 1.0 (type: double), 2.0 (type: double), -2.0 (type: double), 1 (type: bigint), 1 (type: bigint), -2 (type: bigint), 1.0 (type: double), null (type: void), 0.0 (type: double), 1 (type: bigint), 2 (type: bigint), -1 (type: bigint), 1 (type: bigint), rand(3) (type: double), 3 (type: int), -3 (type: int), 3 (type: int), -1 (type: int), -2 (type: int), -2 (type: tinyint), -2 (type: smallint), -2 (type: bigint), 0 (type: tinyint), 0 (type: smallint), 0 (type: int), 0 (type: bigint), 3 (type: tinyint), 3 (type: smallint), 3 (type: int), 3 (type: bigint), 2 (type: tinyint), 2 (type: smallint), 2 (type: int), 2 (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33
Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf5.q.out ql/src/test/results/clientpositive/udf5.q.out
index b8671f3..7096275 100644
--- ql/src/test/results/clientpositive/udf5.q.out
+++ ql/src/test/results/clientpositive/udf5.q.out
@@ -32,7 +32,7 @@ STAGE PLANS:
alias: dest1
Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: from_unixtime(1226446340) (type: string), to_date(from_unixtime(1226446340)) (type: string), day('2008-11-01') (type: int), month('2008-11-01') (type: int), year('2008-11-01') (type: int), day('2008-11-01 15:32:20') (type: int), month('2008-11-01 15:32:20') (type: int), year('2008-11-01 15:32:20') (type: int)
+ expressions: '2008-11-11 15:32:20' (type: string), '2008-11-11' (type: string), 1 (type: int), 11 (type: int), 2008 (type: int), 1 (type: int), 11 (type: int), 2008 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
Statistics: Num rows: 1 Data size: 221 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf6.q.out ql/src/test/results/clientpositive/udf6.q.out
index 595354b..a3ac65f 100644
--- ql/src/test/results/clientpositive/udf6.q.out
+++ ql/src/test/results/clientpositive/udf6.q.out
@@ -32,7 +32,7 @@ STAGE PLANS:
alias: dest1
Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: if(true, 1, 2) (type: int)
+ expressions: 1 (type: int)
outputColumnNames: _col0
Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
@@ -86,7 +86,7 @@ STAGE PLANS:
alias: dest1
Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: if(true, 1, 2) (type: int), if(false, 1, 2) (type: int), if(null, 1, 2) (type: int), if(true, 'a', 'b') (type: string), if(true, 0.1, 0.2) (type: double), if(false, UDFToLong(1), UDFToLong(2)) (type: bigint), if(false, UDFToByte(127), UDFToByte(126)) (type: tinyint), if(false, UDFToShort(127), UDFToShort(128)) (type: smallint), 128 (type: int), 1.0 (type: double), '128' (type: string)
+ expressions: 1 (type: int), 2 (type: int), if(null, 1, 2) (type: int), 'a' (type: string), 0.1 (type: double), 2 (type: bigint), 126 (type: tinyint), 128 (type: smallint), 128 (type: int), 1.0 (type: double), '128' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
Statistics: Num rows: 1 Data size: 220 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf7.q.out ql/src/test/results/clientpositive/udf7.q.out
index a53387d..c4eef14 100644
--- ql/src/test/results/clientpositive/udf7.q.out
+++ ql/src/test/results/clientpositive/udf7.q.out
@@ -46,7 +46,7 @@ STAGE PLANS:
alias: dest1
Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
- expressions: round(ln(3.0), 12) (type: double), ln(0.0) (type: double), ln((- 1)) (type: double), round(log(3.0), 12) (type: double), log(0.0) (type: double), log((- 1)) (type: double), round(log2(3.0), 12) (type: double), log2(0.0) (type: double), log2((- 1)) (type: double), round(log10(3.0), 12) (type: double), log10(0.0) (type: double), log10((- 1)) (type: double), round(log(2, 3.0), 12) (type: double), log(2, 0.0) (type: double), log(2, (- 1)) (type: double), log(0.5, 2) (type: double), log(2, 0.5) (type: double), round(exp(2.0), 12) (type: double), power(2, 3) (type: double), power(2, 3) (type: double), power(2, (- 3)) (type: double), power(0.5, (- 3)) (type: double), power(4, 0.5) (type: double), power((- 1), 0.5) (type: double), power((- 1), 2) (type: double), power(CAST( 1 AS decimal(10,0)), 0) (type: double), power(CAST( 2 AS decimal(10,0)), 3) (type: double), power(CAST( 2 AS decimal(10,0)), 3) (type: double)
+ expressions: 1.098612288668 (type: double), null (type: void), null (type: void), 1.098612288668 (type: double), null (type: void), null (type: void), 1.584962500721 (type: double), null (type: void), null (type: void), 0.47712125472 (type: double), null (type: void), null (type: void), 1.584962500721 (type: double), null (type: void), null (type: void), null (type: void), -1.0 (type: double), 7.389056098931 (type: double), 8.0 (type: double), 8.0 (type: double), 0.125 (type: double), 8.0 (type: double), 2.0 (type: double), NaN (type: double), 1.0 (type: double), power(CAST( 1 AS decimal(10,0)), 0) (type: double), power(CAST( 2 AS decimal(10,0)), 3) (type: double), power(CAST( 2 AS decimal(10,0)), 3) (type: double)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27
Statistics: Num rows: 1 Data size: 224 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf9.q.out ql/src/test/results/clientpositive/udf9.q.out
index 4d25988..d0809bb 100644
--- ql/src/test/results/clientpositive/udf9.q.out
+++ ql/src/test/results/clientpositive/udf9.q.out
@@ -35,7 +35,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: datediff('2008-12-31', '2009-01-01') (type: int), datediff('2008-03-01', '2008-02-28') (type: int), datediff('2007-03-01', '2007-01-28') (type: int), datediff('2008-03-01 23:59:59', '2008-03-02 00:00:00') (type: int), date_add('2008-12-31', 1) (type: string), date_add('2008-12-31', 365) (type: string), date_add('2008-02-28', 2) (type: string), date_add('2009-02-28', 2) (type: string), date_add('2007-02-28', 365) (type: string), date_add('2007-02-28 23:59:59', 730) (type: string), date_sub('2009-01-01', 1) (type: string), date_sub('2009-01-01', 365) (type: string), date_sub('2008-02-28', 2) (type: string), date_sub('2009-02-28', 2) (type: string), date_sub('2007-02-28', 365) (type: string), date_sub('2007-02-28 01:12:34', 730) (type: string)
+ expressions: -1 (type: int), 2 (type: int), 32 (type: int), -1 (type: int), '2009-01-01' (type: string), '2009-12-31' (type: string), '2008-03-01' (type: string), '2009-03-02' (type: string), '2008-02-28' (type: string), '2009-02-27' (type: string), '2008-12-31' (type: string), '2008-01-02' (type: string), '2008-02-26' (type: string), '2009-02-26' (type: string), '2006-02-28' (type: string), '2005-02-28' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf_10_trims.q.out ql/src/test/results/clientpositive/udf_10_trims.q.out
index 82e4328..d37acba 100644
--- ql/src/test/results/clientpositive/udf_10_trims.q.out
+++ ql/src/test/results/clientpositive/udf_10_trims.q.out
@@ -38,7 +38,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: trim(trim(trim(trim(trim(trim(trim(trim(trim(trim(' abc ')))))))))) (type: string)
+ expressions: 'abc' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf_E.q.out ql/src/test/results/clientpositive/udf_E.q.out
index a859718..104e34d 100644
--- ql/src/test/results/clientpositive/udf_E.q.out
+++ ql/src/test/results/clientpositive/udf_E.q.out
@@ -17,7 +17,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: e() (type: double)
+ expressions: 2.718281828459045 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -64,7 +64,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: e() (type: double)
+ expressions: 2.718281828459045 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_PI.q.out ql/src/test/results/clientpositive/udf_PI.q.out
index f42850d..339b47b 100644
--- ql/src/test/results/clientpositive/udf_PI.q.out
+++ ql/src/test/results/clientpositive/udf_PI.q.out
@@ -17,7 +17,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: pi() (type: double)
+ expressions: 3.141592653589793 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -64,7 +64,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: pi() (type: double)
+ expressions: 3.141592653589793 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_abs.q.out ql/src/test/results/clientpositive/udf_abs.q.out
index ea68381..97753c1 100644
--- ql/src/test/results/clientpositive/udf_abs.q.out
+++ ql/src/test/results/clientpositive/udf_abs.q.out
@@ -42,7 +42,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: abs(0) (type: int), abs((- 1)) (type: int), abs(123) (type: int), abs((- 9223372036854775807)) (type: bigint), abs(9223372036854775807) (type: bigint)
+ expressions: 0 (type: int), 1 (type: int), 123 (type: int), 9223372036854775807 (type: bigint), 9223372036854775807 (type: bigint)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -93,7 +93,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: abs(0.0) (type: double), abs((- 3.14159265)) (type: double), abs(3.14159265) (type: double)
+ expressions: 0.0 (type: double), 3.14159265 (type: double), 3.14159265 (type: double)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_ascii.q.out ql/src/test/results/clientpositive/udf_ascii.q.out
index 185a0cd..c9b5501 100644
--- ql/src/test/results/clientpositive/udf_ascii.q.out
+++ ql/src/test/results/clientpositive/udf_ascii.q.out
@@ -38,7 +38,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: ascii('Facebook') (type: int), ascii('') (type: int), ascii('!') (type: int)
+ expressions: 70 (type: int), 0 (type: int), 33 (type: int)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_between.q.out ql/src/test/results/clientpositive/udf_between.q.out
index 129f3cc..fc01e09 100644
--- ql/src/test/results/clientpositive/udf_between.q.out
+++ ql/src/test/results/clientpositive/udf_between.q.out
@@ -24,7 +24,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (key + 100) BETWEEN (150 + (- 50)) AND (150 + 50) (type: boolean)
+ predicate: (key + 100) BETWEEN 100 AND 200 (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string)
@@ -79,7 +79,7 @@ STAGE PLANS:
alias: src
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Filter Operator
- predicate: (key + 100) NOT BETWEEN (150 + (- 50)) AND (150 + 50) (type: boolean)
+ predicate: (key + 100) NOT BETWEEN 100 AND 200 (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string)
@@ -133,13 +133,10 @@ STAGE PLANS:
TableScan
alias: src
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: 'b' BETWEEN 'a' AND 'c' (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 1
Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
@@ -169,13 +166,10 @@ STAGE PLANS:
TableScan
alias: src
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: 2 BETWEEN 2 AND '3' (type: boolean)
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 1
Statistics: Num rows: 1 Data size: 200 Basic stats: COMPLETE Column stats: NONE
diff --git ql/src/test/results/clientpositive/udf_case.q.out ql/src/test/results/clientpositive/udf_case.q.out
index fe11fce..6c186bd 100644
--- ql/src/test/results/clientpositive/udf_case.q.out
+++ ql/src/test/results/clientpositive/udf_case.q.out
@@ -77,7 +77,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: CASE (1) WHEN (1) THEN (2) WHEN (3) THEN (4) ELSE (5) END (type: int), CASE (2) WHEN (1) THEN (2) ELSE (5) END (type: int), CASE (14) WHEN (12) THEN (13) WHEN (14) THEN (15) END (type: int), CASE (16) WHEN (12) THEN (13) WHEN (14) THEN (15) END (type: int), CASE (17) WHEN (18) THEN (null) WHEN (17) THEN (20) END (type: int), CASE (21) WHEN (22) THEN (23) WHEN (21) THEN (24) END (type: int)
+ expressions: 2 (type: int), 5 (type: int), 15 (type: int), null (type: void), CASE (17) WHEN (18) THEN (null) WHEN (17) THEN (20) END (type: int), 24 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_coalesce.q.out ql/src/test/results/clientpositive/udf_coalesce.q.out
index 78d0ab9..b158965 100644
--- ql/src/test/results/clientpositive/udf_coalesce.q.out
+++ ql/src/test/results/clientpositive/udf_coalesce.q.out
@@ -66,7 +66,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: COALESCE(1) (type: int), COALESCE(1,2) (type: int), COALESCE(null,2) (type: int), COALESCE(1,null) (type: int), COALESCE(null,null,3) (type: int), COALESCE(4,null,null,null) (type: int), COALESCE('1') (type: string), COALESCE('1','2') (type: string), COALESCE(null,'2') (type: string), COALESCE('1',null) (type: string), COALESCE(null,null,'3') (type: string), COALESCE('4',null,null,null) (type: string), COALESCE(1.0) (type: double), COALESCE(1.0,2.0) (type: double), COALESCE(null,2.0) (type: double), COALESCE(null,2.0,3.0) (type: double), COALESCE(2.0,null,3.0) (type: double), COALESCE(if(true, null, 0),null) (type: int)
+ expressions: 1 (type: int), 1 (type: int), COALESCE(null,2) (type: int), COALESCE(1,null) (type: int), COALESCE(null,null,3) (type: int), COALESCE(4,null,null,null) (type: int), '1' (type: string), '1' (type: string), COALESCE(null,'2') (type: string), COALESCE('1',null) (type: string), COALESCE(null,null,'3') (type: string), COALESCE('4',null,null,null) (type: string), 1.0 (type: double), 1.0 (type: double), COALESCE(null,2.0) (type: double), COALESCE(null,2.0,3.0) (type: double), COALESCE(2.0,null,3.0) (type: double), COALESCE(if(true, null, 0),null) (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_current_database.q.out ql/src/test/results/clientpositive/udf_current_database.q.out
index 6a2319b..b4381e9 100644
--- ql/src/test/results/clientpositive/udf_current_database.q.out
+++ ql/src/test/results/clientpositive/udf_current_database.q.out
@@ -22,7 +22,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: current_database() (type: string)
+ expressions: 'default' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
File Output Operator
@@ -75,7 +75,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: current_database() (type: string)
+ expressions: 'xxx' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
File Output Operator
@@ -124,7 +124,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: current_database() (type: string)
+ expressions: 'default' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -161,7 +161,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: current_database() (type: string)
+ expressions: 'xxx' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 1 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_degrees.q.out ql/src/test/results/clientpositive/udf_degrees.q.out
index 82c7705..2882819 100644
--- ql/src/test/results/clientpositive/udf_degrees.q.out
+++ ql/src/test/results/clientpositive/udf_degrees.q.out
@@ -17,7 +17,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: degrees(pi()) (type: double)
+ expressions: 180.0 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -64,7 +64,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: degrees(pi()) (type: double)
+ expressions: 180.0 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_elt.q.out ql/src/test/results/clientpositive/udf_elt.q.out
index c963f9d..0439a5f 100644
--- ql/src/test/results/clientpositive/udf_elt.q.out
+++ ql/src/test/results/clientpositive/udf_elt.q.out
@@ -52,7 +52,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: elt(2, 'abc', 'defg') (type: string), elt(3, 'aa', 'bb', 'cc', 'dd', 'ee', 'ff', 'gg') (type: string), elt('1', 'abc', 'defg') (type: string), elt(2, 'aa', UDFToByte('2')) (type: string), elt(2, 'aa', UDFToShort('12345')) (type: string), elt(2, 'aa', UDFToLong('123456789012')) (type: string), elt(2, 'aa', UDFToFloat(1.25)) (type: string), elt(2, 'aa', 16.0) (type: string), elt(null, 'abc', 'defg') (type: string), elt(0, 'abc', 'defg') (type: string), elt(3, 'abc', 'defg') (type: string)
+ expressions: 'defg' (type: string), 'cc' (type: string), 'abc' (type: string), '2' (type: string), '12345' (type: string), '123456789012' (type: string), '1.25' (type: string), '16.0' (type: string), elt(null, 'abc', 'defg') (type: string), null (type: void), null (type: void)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_format_number.q.out ql/src/test/results/clientpositive/udf_format_number.q.out
index 636cdae..9cc08ea 100644
--- ql/src/test/results/clientpositive/udf_format_number.q.out
+++ ql/src/test/results/clientpositive/udf_format_number.q.out
@@ -42,7 +42,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: format_number(12332.123456, 4) (type: string), format_number(12332.1, 4) (type: string), format_number(12332.2, 0) (type: string)
+ expressions: '12,332.1235' (type: string), '12,332.1000' (type: string), '12,332' (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_hash.q.out ql/src/test/results/clientpositive/udf_hash.q.out
index c2591d7..9e2ebf0 100644
--- ql/src/test/results/clientpositive/udf_hash.q.out
+++ ql/src/test/results/clientpositive/udf_hash.q.out
@@ -37,7 +37,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: hash(UDFToByte(1)) (type: int), hash(UDFToShort(2)) (type: int), hash(3) (type: int), hash(UDFToLong('123456789012')) (type: int), hash(UDFToFloat(1.25)) (type: int), hash(16.0) (type: int), hash('400') (type: int), hash('abc') (type: int), hash(true) (type: int), hash(false) (type: int), hash(1,2,3) (type: int)
+ expressions: 1 (type: int), 2 (type: int), 3 (type: int), -1097262584 (type: int), 1067450368 (type: int), 1076887552 (type: int), 51508 (type: int), 96354 (type: int), 1 (type: int), 0 (type: int), 1026 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_hour.q.out ql/src/test/results/clientpositive/udf_hour.q.out
index 156fb5b..6802d1a 100644
--- ql/src/test/results/clientpositive/udf_hour.q.out
+++ ql/src/test/results/clientpositive/udf_hour.q.out
@@ -37,7 +37,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: hour('2009-08-07 13:14:15') (type: int), hour('13:14:15') (type: int), hour('2009-08-07') (type: int)
+ expressions: 13 (type: int), 13 (type: int), null (type: void)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
ListSink
diff --git ql/src/test/results/clientpositive/udf_if.q.out ql/src/test/results/clientpositive/udf_if.q.out
index 6a9458d..d5515d0 100644
--- ql/src/test/results/clientpositive/udf_if.q.out
+++ ql/src/test/results/clientpositive/udf_if.q.out
@@ -39,7 +39,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: if(true, 1, 2) (type: int), if(false, UDFToString(null), UDFToString(1)) (type: string), if((1 = 1), if((2 = 2), 1, 2), if((3 = 3), 3, 4)) (type: int), if((2 = 2), 1, null) (type: int), if((2 = 2), null, 1) (type: int), if(if(true, null, false), 1, 2) (type: int)
+ expressions: 1 (type: int), if(false, UDFToString(null), '1') (type: string), 1 (type: int), if(true, 1, null) (type: int), if(true, null, 1) (type: int), if(if(true, null, false), 1, 2) (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -94,7 +94,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: if(true, UDFToShort(128), UDFToByte(1)) (type: smallint), if(false, 1, 1.1) (type: double), if(false, 1, 'ABC') (type: string), if(false, 'ABC', 12.3) (type: string)
+ expressions: 128 (type: smallint), 1.1 (type: double), 'ABC' (type: string), '12.3' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_instr.q.out ql/src/test/results/clientpositive/udf_instr.q.out
index 4533674..e79cff1 100644
--- ql/src/test/results/clientpositive/udf_instr.q.out
+++ ql/src/test/results/clientpositive/udf_instr.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: instr('abcd''abc') (type: int), instr('abcabc''ccc') (type: int), instr(123'23') (type: int), instr(12323) (type: int), instr(true1) (type: int), instr(false1) (type: int), instr('12345'UDFToByte('2')) (type: int), instr(UDFToShort('12345')'34') (type: int), instr(UDFToLong('123456789012')'456') (type: int), instr(UDFToFloat(1.25)'.25') (type: int), instr(16.0'.0') (type: int), instr(null'abc') (type: int), instr('abcd'null) (type: int)
+ expressions: 1 (type: int), 0 (type: int), 2 (type: int), 2 (type: int), 0 (type: int), 0 (type: int), 2 (type: int), 3 (type: int), 4 (type: int), 2 (type: int), 3 (type: int), instr(null'abc') (type: int), instr('abcd'null) (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out
index c6188d3..30c62a8 100644
--- ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out
+++ ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out
@@ -43,16 +43,13 @@ STAGE PLANS:
TableScan
alias: src
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
- Filter Operator
- predicate: true is not null (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
Select Operator
- expressions: null is null (type: boolean), 1 is not null (type: boolean), 'my string' is not null (type: boolean)
+ expressions: null is null (type: boolean), true (type: boolean), true (type: boolean)
outputColumnNames: _col0, _col1, _col2
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Limit
Number of rows: 1
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
PREHOOK: query: SELECT NULL IS NULL,
diff --git ql/src/test/results/clientpositive/udf_like.q.out ql/src/test/results/clientpositive/udf_like.q.out
index 13c9876..685ec94 100644
--- ql/src/test/results/clientpositive/udf_like.q.out
+++ ql/src/test/results/clientpositive/udf_like.q.out
@@ -38,7 +38,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: ('_%_' like '%\_\%\_%') (type: boolean), ('__' like '%\_\%\_%') (type: boolean), ('%%_%_' like '%\_\%\_%') (type: boolean), ('%_%_%' like '%\%\_\%') (type: boolean), ('_%_' like '\%\_%') (type: boolean), ('%__' like '__\%%') (type: boolean), ('_%' like '\_\%\_\%%') (type: boolean), ('_%' like '\_\%_%') (type: boolean), ('%_' like '\%\_') (type: boolean), ('ab' like '\%\_') (type: boolean), ('ab' like '_a%') (type: boolean), ('ab' like 'a') (type: boolean), ('ab' like '') (type: boolean), ('' like '') (type: boolean)
+ expressions: true (type: boolean), false (type: boolean), true (type: boolean), true (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), true (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), false (type: boolean), true (type: boolean)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
ListSink
diff --git ql/src/test/results/clientpositive/udf_locate.q.out ql/src/test/results/clientpositive/udf_locate.q.out
index 736ff5c..fbde599 100644
--- ql/src/test/results/clientpositive/udf_locate.q.out
+++ ql/src/test/results/clientpositive/udf_locate.q.out
@@ -64,7 +64,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: locate('abc''abcd') (type: int), locate('ccc''abcabc') (type: int), locate('23'123) (type: int), locate(23123) (type: int), locate('abc''abcabc'2) (type: int), locate('abc''abcabc''2') (type: int), locate(1true) (type: int), locate(1false) (type: int), locate(UDFToByte('2')'12345') (type: int), locate('34'UDFToShort('12345')) (type: int), locate('456'UDFToLong('123456789012')) (type: int), locate('.25'UDFToFloat(1.25)) (type: int), locate('.0'16.0) (type: int), locate(null'abc') (type: int), locate('abc'null) (type: int), locate('abc''abcd'null) (type: int), locate('abc''abcd''invalid number') (type: int)
+ expressions: 1 (type: int), 0 (type: int), 2 (type: int), 2 (type: int), 4 (type: int), 4 (type: int), 0 (type: int), 0 (type: int), 2 (type: int), 3 (type: int), 4 (type: int), 2 (type: int), 3 (type: int), locate(null'abc') (type: int), locate('abc'null) (type: int), locate('abc''abcd'null) (type: int), 0 (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_lower.q.out ql/src/test/results/clientpositive/udf_lower.q.out
index ddc8895..5c619cc 100644
--- ql/src/test/results/clientpositive/udf_lower.q.out
+++ ql/src/test/results/clientpositive/udf_lower.q.out
@@ -33,7 +33,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: lower('AbC 123') (type: string), upper('AbC 123') (type: string)
+ expressions: 'abc 123' (type: string), 'ABC 123' (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf_lpad.q.out ql/src/test/results/clientpositive/udf_lpad.q.out
index 148db4d..913b002 100644
--- ql/src/test/results/clientpositive/udf_lpad.q.out
+++ ql/src/test/results/clientpositive/udf_lpad.q.out
@@ -38,7 +38,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: lpad('hi', 1, '?') (type: string), lpad('hi', 5, '.') (type: string), lpad('hi', 6, '123') (type: string)
+ expressions: 'h' (type: string), '...hi' (type: string), '1231hi' (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_minute.q.out ql/src/test/results/clientpositive/udf_minute.q.out
index 1dd70d5..58ece49 100644
--- ql/src/test/results/clientpositive/udf_minute.q.out
+++ ql/src/test/results/clientpositive/udf_minute.q.out
@@ -37,7 +37,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: minute('2009-08-07 13:14:15') (type: int), minute('13:14:15') (type: int), minute('2009-08-07') (type: int)
+ expressions: 14 (type: int), 14 (type: int), null (type: void)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf_nvl.q.out ql/src/test/results/clientpositive/udf_nvl.q.out
index 2565779..29fff78 100644
--- ql/src/test/results/clientpositive/udf_nvl.q.out
+++ ql/src/test/results/clientpositive/udf_nvl.q.out
@@ -34,7 +34,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: if 1 is null returns2 (type: int), if null is null returns5 (type: int)
+ expressions: 1 (type: int), if null is null returns5 (type: int)
outputColumnNames: _col0, _col1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_parse_url.q.out ql/src/test/results/clientpositive/udf_parse_url.q.out
index 0be5722..0becf0e 100644
--- ql/src/test/results/clientpositive/udf_parse_url.q.out
+++ ql/src/test/results/clientpositive/udf_parse_url.q.out
@@ -60,7 +60,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PATH') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'REF') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k2') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k3') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'FILE') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PROTOCOL') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'USERINFO') (type: string), parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'AUTHORITY') (type: string)
+ expressions: 'facebook.com' (type: string), '/path1/p.php' (type: string), 'k1=v1&k2=v2' (type: string), 'Ref1' (type: string), 'v2' (type: string), 'v1' (type: string), null (type: void), '/path1/p.php?k1=v1&k2=v2' (type: string), 'http' (type: string), null (type: void), 'facebook.com' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
File Output Operator
diff --git ql/src/test/results/clientpositive/udf_printf.q.out ql/src/test/results/clientpositive/udf_printf.q.out
index 73734bd..be097f8 100644
--- ql/src/test/results/clientpositive/udf_printf.q.out
+++ ql/src/test/results/clientpositive/udf_printf.q.out
@@ -38,7 +38,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: printf('Hello World %d %s', 100, 'days') (type: string)
+ expressions: 'Hello World 100 days' (type: string)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_radians.q.out ql/src/test/results/clientpositive/udf_radians.q.out
index 0729b1b..1f4b014 100644
--- ql/src/test/results/clientpositive/udf_radians.q.out
+++ ql/src/test/results/clientpositive/udf_radians.q.out
@@ -17,7 +17,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: radians(57.2958) (type: double)
+ expressions: 1.000000357564167 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -73,7 +73,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: radians(57.2958) (type: double)
+ expressions: 1.000000357564167 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_reflect2.q.out ql/src/test/results/clientpositive/udf_reflect2.q.out
index 6b19277..0a123d7 100644
--- ql/src/test/results/clientpositive/udf_reflect2.q.out
+++ ql/src/test/results/clientpositive/udf_reflect2.q.out
@@ -320,7 +320,7 @@ STAGE PLANS:
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Select Operator
- expressions: UDFToInteger(key) (type: int), value (type: string), CAST( '2013-02-15 19:41:20' AS TIMESTAMP) (type: timestamp)
+ expressions: UDFToInteger(key) (type: int), value (type: string), 2013-02-15 19:41:20.0 (type: timestamp)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
Select Operator
diff --git ql/src/test/results/clientpositive/udf_repeat.q.out ql/src/test/results/clientpositive/udf_repeat.q.out
index 07b09e9..f4c0fef 100644
--- ql/src/test/results/clientpositive/udf_repeat.q.out
+++ ql/src/test/results/clientpositive/udf_repeat.q.out
@@ -38,7 +38,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: repeat('Facebook', 3) (type: string), repeat('', 4) (type: string), repeat('asd', 0) (type: string), repeat('asdf', (- 1)) (type: string)
+ expressions: 'FacebookFacebookFacebook' (type: string), '' (type: string), '' (type: string), '' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_rpad.q.out ql/src/test/results/clientpositive/udf_rpad.q.out
index 287f5a9..ea02e10 100644
--- ql/src/test/results/clientpositive/udf_rpad.q.out
+++ ql/src/test/results/clientpositive/udf_rpad.q.out
@@ -38,7 +38,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: rpad('hi', 1, '?') (type: string), rpad('hi', 5, '.') (type: string), rpad('hi', 6, '123') (type: string)
+ expressions: 'h' (type: string), 'hi...' (type: string), 'hi1231' (type: string)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_second.q.out ql/src/test/results/clientpositive/udf_second.q.out
index 4abfae1..096c384 100644
--- ql/src/test/results/clientpositive/udf_second.q.out
+++ ql/src/test/results/clientpositive/udf_second.q.out
@@ -37,7 +37,7 @@ STAGE PLANS:
predicate: (key = 86) (type: boolean)
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: second('2009-08-07 13:14:15') (type: int), second('13:14:15') (type: int), second('2009-08-07') (type: int)
+ expressions: 15 (type: int), 15 (type: int), null (type: void)
outputColumnNames: _col0, _col1, _col2
Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE
ListSink
diff --git ql/src/test/results/clientpositive/udf_sign.q.out ql/src/test/results/clientpositive/udf_sign.q.out
index f0851a9..f4da95d 100644
--- ql/src/test/results/clientpositive/udf_sign.q.out
+++ ql/src/test/results/clientpositive/udf_sign.q.out
@@ -17,7 +17,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: sign(0) (type: double)
+ expressions: 0.0 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
@@ -81,7 +81,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: sign(0) (type: double)
+ expressions: 0.0 (type: double)
outputColumnNames: _col0
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_space.q.out ql/src/test/results/clientpositive/udf_space.q.out
index fd8c2ca..14d696a 100644
--- ql/src/test/results/clientpositive/udf_space.q.out
+++ ql/src/test/results/clientpositive/udf_space.q.out
@@ -40,7 +40,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: space(10) (type: string), space(0) (type: string), space(1) (type: string), space((- 1)) (type: string), space((- 100)) (type: string)
+ expressions: ' ' (type: string), '' (type: string), ' ' (type: string), '' (type: string), '' (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/udf_when.q.out ql/src/test/results/clientpositive/udf_when.q.out
index 42736f4..cbb1210 100644
--- ql/src/test/results/clientpositive/udf_when.q.out
+++ ql/src/test/results/clientpositive/udf_when.q.out
@@ -77,7 +77,7 @@ STAGE PLANS:
Row Limit Per Split: 1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Select Operator
- expressions: CASE WHEN ((1 = 1)) THEN (2) WHEN ((1 = 3)) THEN (4) ELSE (5) END (type: int), CASE WHEN ((6 = 7)) THEN (8) ELSE (9) END (type: int), CASE WHEN ((10 = 11)) THEN (12) WHEN ((13 = 13)) THEN (14) END (type: int), CASE WHEN ((15 = 16)) THEN (17) WHEN ((18 = 19)) THEN (20) END (type: int), CASE WHEN ((21 = 22)) THEN (null) WHEN ((23 = 23)) THEN (24) END (type: int), CASE WHEN ((25 = 26)) THEN (27) WHEN ((28 = 28)) THEN (null) END (type: int)
+ expressions: 2 (type: int), 9 (type: int), 14 (type: int), null (type: void), CASE WHEN (false) THEN (null) WHEN (true) THEN (24) END (type: int), CASE WHEN (false) THEN (27) WHEN (true) THEN (null) END (type: int)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
ListSink
diff --git ql/src/test/results/clientpositive/union20.q.out ql/src/test/results/clientpositive/union20.q.out
index 07e810f..99ca6f9 100644
--- ql/src/test/results/clientpositive/union20.q.out
+++ ql/src/test/results/clientpositive/union20.q.out
@@ -37,11 +37,8 @@ STAGE PLANS:
TableScan
alias: s3
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
- Filter Operator
- predicate: 'tst1' is not null (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
Select Operator
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Group By Operator
aggregations: count(1)
mode: hash
@@ -152,11 +149,8 @@ STAGE PLANS:
TableScan
alias: s1
Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
- Filter Operator
- predicate: 'tst1' is not null (type: boolean)
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
Select Operator
- Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: COMPLETE
+ Statistics: Num rows: 0 Data size: 5812 Basic stats: PARTIAL Column stats: COMPLETE
Group By Operator
aggregations: count(1)
mode: hash
diff --git ql/src/test/results/clientpositive/union27.q.out ql/src/test/results/clientpositive/union27.q.out
index af15e87..0c2a3d1 100644
--- ql/src/test/results/clientpositive/union27.q.out
+++ ql/src/test/results/clientpositive/union27.q.out
@@ -19,6 +19,89 @@ POSTHOOK: query: create table jackson_sev_add as select * from src
POSTHOOK: type: CREATETABLE_AS_SELECT
POSTHOOK: Input: default@src
POSTHOOK: Output: default@jackson_sev_add
+PREHOOK: query: explain select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97
+PREHOOK: type: QUERY
+POSTHOOK: query: explain select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: a
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((key = 97) and key is not null) (type: boolean)
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: '97' (type: string)
+ sort order: +
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
+ TableScan
+ alias: jackson_sev_add
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((key = 97) and key is not null) (type: boolean)
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
+ Union
+ Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: '97' (type: string)
+ sort order: +
+ Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: string)
+ TableScan
+ alias: dim_pho
+ Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: ((key = 97) and key is not null) (type: boolean)
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: value (type: string)
+ outputColumnNames: _col1
+ Statistics: Num rows: 125 Data size: 1328 Basic stats: COMPLETE Column stats: NONE
+ Union
+ Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: '97' (type: string)
+ sort order: +
+ Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE
+ value expressions: _col1 (type: string)
+ Reduce Operator Tree:
+ Join Operator
+ condition map:
+ Inner Join 0 to 1
+ condition expressions:
+ 0
+ 1 {VALUE._col0}
+ outputColumnNames: _col5
+ Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+ Select Operator
+ expressions: '97' (type: string), _col5 (type: string)
+ outputColumnNames: _col0, _col1
+ Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE
+ table:
+ input format: org.apache.hadoop.mapred.TextInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+ Stage: Stage-0
+ Fetch Operator
+ limit: -1
+ Processor Tree:
+ ListSink
+
PREHOOK: query: select b.* from jackson_sev_same a join (select * from dim_pho union all select * from jackson_sev_add)b on a.key=b.key and b.key=97
PREHOOK: type: QUERY
PREHOOK: Input: default@dim_pho
diff --git ql/src/test/results/clientpositive/union33.q.out ql/src/test/results/clientpositive/union33.q.out
index 7a2fe67..9e8bea4 100644
--- ql/src/test/results/clientpositive/union33.q.out
+++ ql/src/test/results/clientpositive/union33.q.out
@@ -116,7 +116,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Union
@@ -354,7 +354,7 @@ STAGE PLANS:
predicate: (key = 0) (type: boolean)
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Select Operator
- expressions: key (type: string), value (type: string)
+ expressions: '0' (type: string), value (type: string)
outputColumnNames: _col0, _col1
Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE
Union
diff --git ql/src/test/results/compiler/plan/cast1.q.xml ql/src/test/results/compiler/plan/cast1.q.xml
index 1143bc4..5b9bbf8 100644
--- ql/src/test/results/compiler/plan/cast1.q.xml
+++ ql/src/test/results/compiler/plan/cast1.q.xml
@@ -378,7 +378,7 @@