Index: contrib/src/test/results/clientpositive/dboutput.q.out =================================================================== --- contrib/src/test/results/clientpositive/dboutput.q.out (revision 1083142) +++ contrib/src/test/results/clientpositive/dboutput.q.out (working copy) @@ -143,21 +143,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: dboutput('jdbc:derby:../build/test_dboutput_db','','','INSERT INTO app_info (kkey,vvalue) VALUES (?,?)',key,value) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: dboutput('jdbc:derby:../build/test_dboutput_db','','','INSERT INTO app_info (kkey,vvalue) VALUES (?,?)',key,value) + type: int + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: contrib/src/test/results/clientpositive/serde_typedbytes4.q.out =================================================================== --- contrib/src/test/results/clientpositive/serde_typedbytes4.q.out (revision 1083142) +++ contrib/src/test/results/clientpositive/serde_typedbytes4.q.out (working copy) @@ -50,42 +50,38 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: UDFToByte(key) - type: tinyint - expr: value - type: string - outputColumnNames: _col0, _col1 - Transform Operator - command: /bin/cat - output info: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - Select Operator - expressions: + Select Operator + expressions: + expr: UDFToByte(key) + type: tinyint + expr: value + type: string + outputColumnNames: _col0, _col1 + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator Index: hbase-handler/src/test/results/hbase_pushdown.q.out =================================================================== --- hbase-handler/src/test/results/hbase_pushdown.q.out (revision 1083142) +++ hbase-handler/src/test/results/hbase_pushdown.q.out (working copy) @@ -103,23 +103,19 @@ predicate: expr: (value like '%90%') type: boolean - Filter Operator - predicate: - expr: ((key = 90) and (value like '%90%')) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output 
Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -166,23 +162,19 @@ predicate: expr: ((value like '%90%') and (key = UDFToInteger(value))) type: boolean - Filter Operator - predicate: - expr: (((key = 90) and (value like '%90%')) and (key = UDFToInteger(value))) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -217,23 +209,19 @@ predicate: expr: (((key = 80) and (key = 90)) and (value like '%90%')) type: boolean - Filter Operator - predicate: - expr: (((key = 80) and (key = 90)) and (value like '%90%')) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -300,23 +288,19 @@ predicate: expr: (CASE WHEN ((key = 90)) THEN (2) ELSE (4) END > 3) type: boolean - Filter Operator - predicate: - expr: (CASE WHEN ((key = 90)) THEN (2) ELSE (4) END > 3) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -353,23 +337,19 @@ predicate: expr: ((key = 80) or (value like '%90%')) type: boolean - Filter Operator - predicate: - expr: ((key = 80) or (value like '%90%')) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output 
Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -402,23 +382,19 @@ predicate: expr: (key = 90) type: boolean - Filter Operator - predicate: - expr: (key = 90) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: hbase-handler/src/test/results/hbase_queries.q.out =================================================================== --- hbase-handler/src/test/results/hbase_queries.q.out (revision 1083142) +++ hbase-handler/src/test/results/hbase_queries.q.out (working copy) @@ -50,32 +50,28 @@ predicate: expr: ((key % 2) = 0) type: boolean - Filter Operator - predicate: - expr: ((key % 2) = 0) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat - output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat - serde: org.apache.hadoop.hive.hbase.HBaseSerDe - name: default.hbase_table_1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat + output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat + serde: org.apache.hadoop.hive.hbase.HBaseSerDe + name: default.hbase_table_1 PREHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0 @@ -306,24 +302,20 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: int - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 0 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 0 y:hbase_table_2 TableScan alias: hbase_table_2 @@ -331,31 +323,27 @@ predicate: expr: (key < 120) 
type: boolean - Filter Operator - predicate: - expr: (key < 120) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (revision 1083142) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (working copy) @@ -675,6 +675,24 @@ } } + public void removeParent(Operator parent) { + int parentIndex = parentOperators.indexOf(parent); + assert parentIndex != -1; + if (parentOperators.size() == 1) { + parentOperators = null; + } else { + parentOperators.remove(parentIndex); + } + + int childIndex = parent.getChildOperators().indexOf(this); + assert childIndex != -1; + if (parent.getChildOperators().size() == 1) { + parent.setChildOperators(null); + } else { + parent.getChildOperators().remove(childIndex); + } + } + /** * Replace one parent with another at the same position. Children of the new * parent are not updated Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (revision 1083142) +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (working copy) @@ -66,21 +66,30 @@ private RowResolver toRR = null; /** - * this map contains a expr infos. Each key is a node in the expression tree - * and the information for each node is the value which is used while walking - * the tree by its parent. + * Holds the expression sub-trees (predicates) that can be pushed down for + * root expression tree. Since there can be more than one alias in an + * expression tree, this is a map from the alias to predicates. */ private final Map<String, List<ExprNodeDesc>> pushdownPreds; + /** - * Values the expression sub-trees (predicates) that can be pushed down for + * Holds the expression sub-trees (predicates) that cannot be pushed down for * root expression tree. Since there can be more than one alias in an * expression tree, this is a map from the alias to predicates. */ + private final Map<String, List<ExprNodeDesc>> nonFinalPreds; + + /** + * This map contains expr infos. Each key is a node in the expression tree + * and the value is the information for that node, which is used by its + * parent while walking the tree. + */ private final Map<ExprNodeDesc, ExprInfo> exprInfoMap; private boolean isDeterministic = true; public ExprWalkerInfo() { pushdownPreds = new HashMap<String, List<ExprNodeDesc>>(); + nonFinalPreds = new HashMap<String, List<ExprNodeDesc>>(); exprInfoMap = new HashMap<ExprNodeDesc, ExprInfo>(); } @@ -91,6 +100,7 @@ pushdownPreds = new HashMap<String, List<ExprNodeDesc>>(); exprInfoMap = new HashMap<ExprNodeDesc, ExprInfo>(); + nonFinalPreds = new HashMap<String, List<ExprNodeDesc>>(); } /** @@ -214,6 +224,19 @@ } /** + * Adds the passed list of pushDowns for the alias.
+ * + * @param alias + * @param pushDowns + */ + public void addPushDowns(String alias, List<ExprNodeDesc> pushDowns) { + if (pushdownPreds.get(alias) == null) { + pushdownPreds.put(alias, new ArrayList<ExprNodeDesc>()); + } + pushdownPreds.get(alias).addAll(pushDowns); + } + + /** * Returns the list of pushdown expressions for each alias that appear in the * current operator's RowResolver. The exprs in each list can be combined * using conjunction (AND). @@ -225,6 +248,28 @@ } /** + * Adds the specified expr as a non-final candidate. + * + * @param expr + */ + public void addNonFinalCandidate(ExprNodeDesc expr) { + String alias = getAlias(expr); + if (nonFinalPreds.get(alias) == null) { + nonFinalPreds.put(alias, new ArrayList<ExprNodeDesc>()); + } + nonFinalPreds.get(alias).add(expr.clone()); + } + + /** + * Returns the list of non-final candidate predicates for each alias. + * + * @return + */ + public Map<String, List<ExprNodeDesc>> getNonFinalCandidates() { + return nonFinalPreds; + } + + /** * Merges the specified pushdown predicates with the current class. * * @param ewi Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (revision 1083142) +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (working copy) @@ -288,6 +288,8 @@ if (ctx.isCandidate(expr)) { ctx.addFinalCandidate(expr); return; + } else if (!FunctionRegistry.isOpAnd(expr)) { + ctx.addNonFinalCandidate(expr); } if (FunctionRegistry.isOpAnd(expr)) { Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (revision 1083142) +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (working copy) @@ -43,7 +43,6 @@ import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler; import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler; -import org.apache.hadoop.hive.ql.metadata.HiveUtils; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.parse.OpParseContext; import org.apache.hadoop.hive.ql.parse.RowResolver; @@ -88,6 +87,22 @@ // script operator is a black-box to hive so no optimization here // assuming that nothing can be pushed above the script op // same with LIMIT op + // create a filter with all children predicates + OpWalkerInfo owi = (OpWalkerInfo) procCtx; + ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, null, false); + return createFilter((Operator)nd, unpushedPreds, owi); + } + + } + + public static class UDTFPPD extends DefaultPPD implements NodeProcessor { + @Override + public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx, + Object... nodeOutputs) throws SemanticException { + LOG.info("Processing for " + nd.getName() + "(" + + ((Operator) nd).getIdentifier() + ")"); + // Predicates for UDTF won't be candidates for its children. So, nothing to + // optimize here. See lateral_view_ppd.q for an example. return null; } @@ -102,6 +117,10 @@ + ((Operator) nd).getIdentifier() + ")"); OpWalkerInfo owi = (OpWalkerInfo) procCtx; + // The lateral view forward operator has 2 children, a SELECT(*) and + // a SELECT(cols) (for the UDTF operator). The child at index 0 is the + // SELECT(*) because that's the way that the DAG was constructed. We + // only want to get the predicates from the SELECT(*).
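The ExprWalkerInfo and ExprWalkerProcFactory additions above introduce "non-final" candidates: conjuncts of a filter's predicate that cannot be pushed past the current operator. A FilterOperator only becomes a removal candidate when no non-final candidate was recorded for it. The following is a minimal standalone sketch of that bookkeeping, for illustration only; it uses plain Java collections and String stand-ins rather than Hive's ExprNodeDesc API, and the class and method names are made up.

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative stand-in for ExprWalkerInfo's pushdownPreds / nonFinalPreds bookkeeping.
    public class PushdownBookkeepingSketch {
      private final Map<String, List<String>> pushdownPreds = new HashMap<String, List<String>>();
      private final Map<String, List<String>> nonFinalPreds = new HashMap<String, List<String>>();

      // Each conjunct of a filter's predicate lands in exactly one of the two maps,
      // keyed by the alias it refers to (mirrors addFinalCandidate / addNonFinalCandidate).
      public void classify(String alias, String conjunct, boolean pushable) {
        Map<String, List<String>> target = pushable ? pushdownPreds : nonFinalPreds;
        if (target.get(alias) == null) {
          target.put(alias, new ArrayList<String>());
        }
        target.get(alias).add(conjunct);
      }

      // Mirrors the new FilterPPD check: the original FilterOperator is only marked
      // for removal when nothing was left behind as a non-final candidate.
      public boolean filterIsRemovable() {
        return nonFinalPreds.isEmpty();
      }

      public static void main(String[] args) {
        PushdownBookkeepingSketch info = new PushdownBookkeepingSketch();
        info.classify("src", "key < 10", true);        // pushed down to the scan
        info.classify("src", "f(value) = 1", false);   // assume this conjunct cannot be pushed
        System.out.println("filter removable: " + info.filterIsRemovable());  // prints false
      }
    }
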
ExprWalkerInfo childPreds = owi .getPrunedPreds((Operator) nd.getChildren() .get(0)); @@ -146,22 +165,29 @@ OpWalkerInfo owi = (OpWalkerInfo) procCtx; Operator op = (Operator) nd; ExprNodeDesc predicate = (((FilterOperator) nd).getConf()).getPredicate(); - // get pushdown predicates for this operator's predicate - ExprWalkerInfo ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op, - predicate); - if (!ewi.isDeterministic()) { - /* predicate is not deterministic */ - if (op.getChildren() != null && op.getChildren().size() == 1) { - createFilter(op, owi - .getPrunedPreds((Operator) (op - .getChildren().get(0))), owi); + ExprWalkerInfo ewi = new ExprWalkerInfo(); + // Don't push a sampling predicate since createFilter() always creates filter + // with isSamplePred = false. Also, the filterop with sampling pred is always + // a child of TableScan, so there is no need to push this predicate. + if (!((FilterOperator)op).getConf().getIsSamplingPred()) { + // get pushdown predicates for this operator's predicate + ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op, predicate); + if (!ewi.isDeterministic()) { + /* predicate is not deterministic */ + if (op.getChildren() != null && op.getChildren().size() == 1) { + createFilter(op, owi + .getPrunedPreds((Operator) (op + .getChildren().get(0))), owi); + } + return null; } - - return null; + // add this filter for deletion, if it does not have non-final candidates + if (ewi.getNonFinalCandidates().values().isEmpty()) { + owi.addCandidateFilterOp((FilterOperator)op); + } + logExpr(nd, ewi); + owi.putPrunedPreds((Operator) nd, ewi); } - - logExpr(nd, ewi); - owi.putPrunedPreds(op, ewi); // merge it with children predicates mergeWithChildrenPred(op, owi, ewi, null, false); @@ -182,8 +208,12 @@ OpWalkerInfo owi = (OpWalkerInfo) procCtx; Set aliases = getQualifiedAliases((JoinOperator) nd, owi .getRowResolver(nd)); - mergeWithChildrenPred(nd, owi, null, aliases, false); - return null; + boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, aliases, false); + if (hasUnpushedPredicates) { + aliases = null; + } + ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, aliases, false); + return createFilter((Operator)nd, unpushedPreds, owi); } /** @@ -283,7 +313,12 @@ Object... 
nodeOutputs) throws SemanticException { LOG.info("Processing for " + nd.getName() + "(" + ((Operator) nd).getIdentifier() + ")"); - mergeWithChildrenPred(nd, (OpWalkerInfo) procCtx, null, null, false); + OpWalkerInfo owi = (OpWalkerInfo) procCtx; + boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, null, false); + if (hasUnpushedPredicates) { + ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, null, false); + return createFilter((Operator)nd, unpushedPreds, owi); + } return null; } @@ -318,20 +353,21 @@ * @param ignoreAliases * @throws SemanticException */ - protected void mergeWithChildrenPred(Node nd, OpWalkerInfo owi, + protected boolean mergeWithChildrenPred(Node nd, OpWalkerInfo owi, ExprWalkerInfo ewi, Set aliases, boolean ignoreAliases) throws SemanticException { + boolean hasUnpushedPredicates = false; if (nd.getChildren() == null || nd.getChildren().size() > 1) { // ppd for multi-insert query is not yet implemented // no-op for leafs - return; + return hasUnpushedPredicates; } Operator op = (Operator) nd; ExprWalkerInfo childPreds = owi .getPrunedPreds((Operator) nd.getChildren() .get(0)); if (childPreds == null) { - return; + return hasUnpushedPredicates; } if (ewi == null) { ewi = new ExprWalkerInfo(); @@ -344,12 +380,41 @@ // input8.q ExprWalkerInfo extractPushdownPreds = ExprWalkerProcFactory .extractPushdownPreds(owi, op, e.getValue()); + if (!extractPushdownPreds.getNonFinalCandidates().isEmpty()) { + hasUnpushedPredicates = true; + } ewi.merge(extractPushdownPreds); logExpr(nd, extractPushdownPreds); } } owi.putPrunedPreds((Operator) nd, ewi); + return hasUnpushedPredicates; } + + protected ExprWalkerInfo mergeChildrenPred(Node nd, OpWalkerInfo owi, + Set excludedAliases, boolean ignoreAliases) + throws SemanticException { + if (nd.getChildren() == null) { + return null; + } + Operator op = (Operator) nd; + ExprWalkerInfo ewi = new ExprWalkerInfo(); + for (Operator child : op.getChildOperators()) { + ExprWalkerInfo childPreds = owi.getPrunedPreds(child); + if (childPreds == null) { + continue; + } + for (Entry> e : childPreds + .getFinalCandidates().entrySet()) { + if (ignoreAliases || excludedAliases == null || + !excludedAliases.contains(e.getKey()) || e.getKey() == null) { + ewi.addPushDowns(e.getKey(), e.getValue()); + logExpr(nd, ewi); + } + } + } + return ewi; + } } protected static Object createFilter(Operator op, @@ -423,6 +488,21 @@ } OpParseContext ctx = new OpParseContext(inputRR); owi.put(output, ctx); + + // remove the candidate filter ops + for (FilterOperator fop : owi.getCandidateFilterOps()) { + List> children = fop.getChildOperators(); + List> parents = fop.getParentOperators(); + for (Operator parent : parents) { + parent.getChildOperators().addAll(children); + parent.removeChild(fop); + } + for (Operator child : children) { + child.getParentOperators().addAll(parents); + child.removeParent(fop); + } + } + owi.getCandidateFilterOps().clear(); return output; } @@ -530,7 +610,7 @@ } public static NodeProcessor getUDTFProc() { - return new ScriptPPD(); + return new UDTFPPD(); } public static NodeProcessor getLVFProc() { Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (revision 1083142) +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (working copy) @@ -18,9 +18,12 @@ package org.apache.hadoop.hive.ql.ppd; import java.io.Serializable; +import java.util.ArrayList; import 
java.util.HashMap; +import java.util.List; import java.util.Map; +import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx; @@ -39,11 +42,13 @@ private final HashMap, ExprWalkerInfo> opToPushdownPredMap; private final Map, OpParseContext> opToParseCtxMap; private final ParseContext pGraphContext; + private final List candidateFilterOps; public OpWalkerInfo(ParseContext pGraphContext) { this.pGraphContext = pGraphContext; opToParseCtxMap = pGraphContext.getOpParseCtx(); opToPushdownPredMap = new HashMap, ExprWalkerInfo>(); + candidateFilterOps = new ArrayList(); } public ExprWalkerInfo getPrunedPreds(Operator op) { @@ -67,4 +72,13 @@ public ParseContext getParseContext() { return pGraphContext; } + + public List getCandidateFilterOps() { + return candidateFilterOps; + } + + public void addCandidateFilterOp(FilterOperator fop) { + candidateFilterOps.add(fop); + } + } Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (revision 1083142) +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (working copy) @@ -17,13 +17,10 @@ */ package org.apache.hadoop.hive.ql.ppd; -import java.io.Serializable; import java.util.ArrayList; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; -import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; import org.apache.hadoop.hive.ql.lib.Dispatcher; @@ -33,7 +30,6 @@ import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; import org.apache.hadoop.hive.ql.optimizer.Transform; -import org.apache.hadoop.hive.ql.parse.OpParseContext; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -73,12 +69,10 @@ public class PredicatePushDown implements Transform { private ParseContext pGraphContext; - private HashMap, OpParseContext> opToParseCtxMap; @Override public ParseContext transform(ParseContext pctx) throws SemanticException { pGraphContext = pctx; - opToParseCtxMap = pGraphContext.getOpParseCtx(); // create a the context for walking operators OpWalkerInfo opWalkerInfo = new OpWalkerInfo(pGraphContext); Index: ql/src/test/results/clientpositive/auto_join0.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join0.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join0.q.out (working copy) @@ -52,26 +52,22 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [] - 1 [] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [] + 1 [] + Position of Big Table: 0 Stage: Stage-5 Map Reduce @@ -83,46 +79,42 @@ predicate: expr: (key < 
10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [] - 1 [] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [] + 1 [] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -220,26 +212,22 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [] - 1 [] - Position of Big Table: 1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [] + 1 [] + Position of Big Table: 1 Stage: Stage-6 Map Reduce @@ -251,46 +239,42 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [] - 1 [] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [] + 1 [] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - 
File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -304,25 +288,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: 0 + value expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string a:src2:src TableScan alias: src @@ -330,25 +310,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: 1 + value expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join11.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join11.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join11.q.out (working copy) @@ -73,41 +73,37 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} - 1 {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} + 1 {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col3 - Position of Big Table: 0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ 
-159,19 +155,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 1 Stage: Stage-5 Map Reduce @@ -235,22 +227,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src Index: ql/src/test/results/clientpositive/auto_join12.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join12.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join12.q.out (working copy) @@ -82,21 +82,17 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-5 Map Reduce @@ -113,44 +109,40 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 0 to 2 - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + outputColumnNames: _col0, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col3 - Position of Big Table: 0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce 
Local Work @@ -205,21 +197,17 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + Position of Big Table: 1 src3:src TableScan alias: src @@ -232,21 +220,17 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + Position of Big Table: 1 Stage: Stage-6 Map Reduce @@ -320,21 +304,17 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + Position of Big Table: 2 src2:src TableScan alias: src @@ -372,44 +352,40 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 0 to 2 - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + outputColumnNames: _col0, _col3 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col3 - Position of Big Table: 2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -428,22 +404,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort 
order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src @@ -478,19 +450,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join13.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join13.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join13.q.out (working copy) @@ -85,28 +85,24 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - outputColumnNames: _col0, _col2, _col3 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col2, _col3 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -132,19 +128,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col3} {_col0} - 1 - handleSkewJoin: false - keys: - 0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()] - 1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col3} {_col0} + 1 + handleSkewJoin: false + keys: + 0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()] + 1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()] + Position of Big Table: 0 Stage: Stage-6 Map Reduce @@ -246,41 +238,37 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col3} {_col0} - 1 - handleSkewJoin: false - keys: - 0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()] - 1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()] - outputColumnNames: _col1, _col2 - Position of 
Big Table: 1 - Select Operator - expressions: - expr: _col2 - type: string - expr: _col1 - type: string - outputColumnNames: _col2, _col1 - Group By Operator - aggregations: - expr: sum(hash(_col2,_col1)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col3} {_col0} + 1 + handleSkewJoin: false + keys: + 0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()] + 1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()] + outputColumnNames: _col1, _col2 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col2 + type: string + expr: _col1 + type: string + outputColumnNames: _col2, _col1 + Group By Operator + aggregations: + expr: sum(hash(_col2,_col1)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -314,19 +302,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - Reduce Output Operator - key expressions: - expr: UDFToDouble(_col0) - type: double - sort order: + - Map-reduce partition columns: - expr: UDFToDouble(_col0) - type: double - tag: 1 + Reduce Output Operator + key expressions: + expr: UDFToDouble(_col0) + type: double + sort order: + + Map-reduce partition columns: + expr: UDFToDouble(_col0) + type: double + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -375,19 +359,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 1 Stage: Stage-10 Map Reduce @@ -438,22 +418,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src Index: ql/src/test/results/clientpositive/auto_join14.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join14.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join14.q.out (working copy) @@ -38,19 +38,15 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} - 1 {value} - handleSkewJoin: false - keys: - 0 
[Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-4 Map Reduce @@ -62,44 +58,40 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {key} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col0, _col5 - Position of Big Table: 0 + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col5 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col5 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -130,33 +122,7 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 1 - - Stage: Stage-5 - Map Reduce - Alias -> Map Operator Tree: - srcpart - TableScan - alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 + HashTable Sink Operator condition expressions: 0 {key} 1 {value} @@ -164,30 +130,48 @@ keys: 0 [Column[key]] 1 [Column[key]] - outputColumnNames: _col0, _col5 Position of Big Table: 1 + + Stage: Stage-5 + Map Reduce + Alias -> Map Operator Tree: + srcpart + TableScan + alias: srcpart + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col5 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col5 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -201,29 +185,6 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - srcpart - TableScan - alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean Reduce Output Operator key expressions: expr: key @@ -232,10 +193,25 @@ Map-reduce partition columns: expr: key type: string - tag: 1 + tag: 0 value expressions: - expr: value + expr: key type: string + srcpart + TableScan + alias: srcpart + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: value + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join16.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join16.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join16.q.out (working copy) @@ -65,56 +65,44 @@ predicate: expr: ((key > 10) and (key > 20)) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 > 20) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0], Column[_col1]] - 1 [Column[key], Column[value]] - outputColumnNames: _col0, _col3 - Position of Big Table: 0 - Filter Operator - predicate: - expr: (_col3 < 200) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0], Column[_col1]] + 1 [Column[key], Column[value]] + outputColumnNames: _col0, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col3 + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -161,30 +149,22 @@ predicate: expr: ((key > 10) and (key > 20)) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 > 20) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0], Column[_col1]] - 1 [Column[key], Column[value]] - Position of Big Table: 1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0], Column[_col1]] + 1 [Column[key], Column[value]] + Position of Big Table: 1 Stage: Stage-5 Map Reduce @@ -208,29 +188,25 @@ 1 [Column[key], Column[value]] outputColumnNames: _col0, _col3 Position of Big Table: 1 - Filter Operator - predicate: - expr: (_col3 < 200) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col3 + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -244,37 +220,29 @@ predicate: expr: ((key > 10) and (key > 20)) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 > 20) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string tab TableScan alias: tab @@ -307,29 +275,25 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col3 - Filter Operator - predicate: - expr: (_col3 < 200) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - 
mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col3 + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/auto_join19.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join19.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join19.q.out (working copy) @@ -68,32 +68,28 @@ 1 [Column[key]] outputColumnNames: _col0, _col2, _col3, _col7 Position of Big Table: 0 - Filter Operator - predicate: - expr: (((_col2 = '2008-04-08') or (_col2 = '2008-04-09')) and ((_col3 = '12') or (_col3 = '11'))) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -148,32 +144,28 @@ 1 [Column[key]] outputColumnNames: _col0, _col2, _col3, _col7 Position of Big Table: 1 - Filter Operator - predicate: - expr: (((_col2 = '2008-04-08') or (_col2 = '2008-04-09')) and ((_col3 = '12') or (_col3 = '11'))) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + 
name: default.dest1 Local Work: Map Reduce Local Work @@ -223,32 +215,28 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col2, _col3, _col7 - Filter Operator - predicate: - expr: (((_col2 = '2008-04-08') or (_col2 = '2008-04-09')) and ((_col3 = '12') or (_col3 = '11'))) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) Index: ql/src/test/results/clientpositive/auto_join20.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join20.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join20.q.out (working copy) @@ -49,25 +49,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 a:src3 TableScan alias: src3 @@ -245,25 +241,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 a:src2 TableScan alias: src2 @@ -342,24 +334,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + 
value expressions: + expr: key + type: string + expr: value + type: string a:src2 TableScan alias: src2 @@ -506,25 +494,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 a:src3 TableScan alias: src3 @@ -554,50 +538,46 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Right Outer Join0 to 2 - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Position of Big Table: 1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + Right Outer Join0 to 2 + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -710,25 +690,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 a:src2 TableScan alias: src2 @@ -736,25 +712,21 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - HashTable Sink Operator - condition expressions: - 0 
{key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 Stage: Stage-7 Map Reduce @@ -815,24 +787,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string a:src2 TableScan alias: src2 @@ -840,24 +808,20 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string a:src3 TableScan alias: src3 Index: ql/src/test/results/clientpositive/auto_join21.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join21.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join21.q.out (working copy) @@ -38,25 +38,21 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 {(key < 10)} - 1 - 2 {(key < 10)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 {(key < 10)} + 1 + 2 {(key < 10)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 src3 TableScan alias: src3 @@ -202,25 +198,21 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 {(key < 10)} - 1 - 2 {(key < 10)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 {(key < 10)} + 1 + 2 {(key < 10)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 Stage: Stage-6 Map Reduce @@ -298,24 +290,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) 
- type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 Index: ql/src/test/results/clientpositive/auto_join23.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join23.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join23.q.out (working copy) @@ -67,27 +67,23 @@ 1 [] outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -171,27 +167,23 @@ 1 [] outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 1 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -237,27 +229,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: 
string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/auto_join4.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join4.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join4.q.out (working copy) @@ -62,26 +62,22 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-4 Map Reduce @@ -93,29 +89,36 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Left Outer Join0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 0 Select Operator expressions: expr: _col0 @@ -129,34 +132,23 @@ outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -183,31 +175,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -215,31 +203,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join5.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join5.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join5.q.out (working copy) @@ -62,26 +62,22 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 1 Stage: Stage-4 Map Reduce @@ -93,29 +89,36 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Right Outer Join0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] + 
Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Right Outer Join0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 1 Select Operator expressions: expr: _col0 @@ -129,34 +132,23 @@ outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -183,31 +175,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -215,31 +203,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string 
Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join6.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join6.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join6.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join7.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join7.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join7.q.out (working copy) @@ -62,31 +62,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -94,31 +90,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: 
string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:c:src3 TableScan alias: src3 @@ -126,31 +118,27 @@ predicate: expr: ((key > 20) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 20) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 2 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join8.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join8.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join8.q.out (working copy) @@ -62,26 +62,22 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-4 Map Reduce @@ -93,29 +89,29 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Left Outer Join0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 0 + Filter Operator 
+ predicate: + expr: (_col2 is null and _col0 is not null) + type: boolean Select Operator expressions: expr: _col0 @@ -127,40 +123,36 @@ expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Filter Operator - predicate: - expr: (_col2 is null and _col0 is not null) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -187,31 +179,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -219,31 +207,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -253,21 +237,21 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string 
- expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Filter Operator - predicate: - expr: (_col2 is null and _col0 is not null) - type: boolean + Filter Operator + predicate: + expr: (_col2 is null and _col0 is not null) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: expr: _col0 Index: ql/src/test/results/clientpositive/auto_join9.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join9.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/auto_join9.q.out (working copy) @@ -66,32 +66,28 @@ 1 [Column[key]] outputColumnNames: _col0, _col2, _col3, _col7 Position of Big Table: 0 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -146,32 +142,28 @@ 1 [Column[key]] outputColumnNames: _col0, _col2, _col3, _col7 Position of Big Table: 1 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -221,32 +213,28 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col2, _col3, _col7 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean + Select Operator + 
expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) Index: ql/src/test/results/clientpositive/bucket2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/bucket2.q.out (working copy) @@ -190,28 +190,24 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/bucket3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/bucket3.q.out (working copy) @@ -211,36 +211,28 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Filter Operator - predicate: - expr: (ds = '1') - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col2 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/bucket4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket4.q.out 
(revision 1083142) +++ ql/src/test/results/clientpositive/bucket4.q.out (working copy) @@ -195,23 +195,19 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/bucket_groupby.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_groupby.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/bucket_groupby.q.out (working copy) @@ -44,36 +44,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '100') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -183,36 +179,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -291,36 +283,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean - Select Operator - expressions: - expr: key - type: string - outputColumnNames: key - Group By 
Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: length(key) + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: + expr: length(key) + type: int + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -390,36 +378,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean - Select Operator - expressions: - expr: key - type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: abs(length(key)) + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: + expr: abs(length(key)) + type: int + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -491,42 +475,38 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + expr: 3 + type: int + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: 3 + expr: _col1 type: int - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: int - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: int - tag: -1 - value expressions: - expr: _col2 - type: bigint + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: int + tag: -1 + value expressions: + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -607,41 +587,37 @@ subq:clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean + Select Operator + expressions: + expr: value + type: string + outputColumnNames: _col0 Select Operator expressions: - expr: value + expr: _col0 type: string outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + 
keys: expr: _col0 type: string - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1250,36 +1226,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '102') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1360,36 +1332,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '102') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: value type: string - outputColumnNames: value - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1470,44 +1438,40 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '102') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string expr: value type: string - outputColumnNames: key, value - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - mode: hash - outputColumnNames: 
_col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1637,36 +1601,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '103') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1751,44 +1711,40 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '103') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + outputColumnNames: value, key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: value type: string expr: key type: string - outputColumnNames: value, key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: key + expr: _col1 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy) @@ -152,46 +152,41 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-32-14_620_3045094958113552377/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-32-14_620_3045094958113552377/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297330334 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_07-53-32_368_8826092606561337219/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_07-53-32_368_8826092606561337219/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300373612 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -583,50 +578,45 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-32-48_438_2652635181723360989/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-32-48_438_2652635181723360989/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 464 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 - transient_lastDdlTime 1297330356 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_07-54-56_871_7781415598740755814/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_07-54-56_871_7781415598740755814/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 464 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 8983 + transient_lastDdlTime 1300373666 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ -104,20 +104,15 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Bucket Mapjoin Context: Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt]} @@ -556,23 +551,27 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - 
type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col1, _col5 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string outputColumnNames: _col0, _col1, _col5 - Position of Big Table: 1 Select Operator expressions: expr: _col0 @@ -581,46 +580,37 @@ type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-34-03_556_4720240850245784464/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-34-03_556_4720240850245784464/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 0 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 - transient_lastDdlTime 1297330431 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_07-58-13_974_1529622891391007635/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_07-58-13_974_1529622891391007635/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 0 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 + transient_lastDdlTime 1300373863 + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -104,20 +104,15 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Bucket Mapjoin Context: Alias Bucket Base File Name Mapping: b {srcbucket22.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket23.txt=[srcbucket21.txt, srcbucket23.txt]} @@ -134,23 +129,27 @@ TableScan alias: a GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col1, _col6 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col6 + type: string outputColumnNames: _col0, _col1, _col6 - Position of Big Table: 0 Select Operator expressions: expr: _col0 @@ -159,42 +158,33 @@ type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-34-44_204_2973712557284504853/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-34-44_204_2973712557284504853/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297330484 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + 
outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_08-00-00_039_7908278162778397164/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_08-00-00_039_7908278162778397164/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300374000 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -540,20 +530,15 @@ TableScan alias: a GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 1 Bucket Mapjoin Context: Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt], srcbucket22.txt=[srcbucket22.txt], srcbucket23.txt=[srcbucket23.txt]} @@ -572,23 +557,27 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col1, _col6 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col6 + type: string outputColumnNames: _col0, _col1, _col6 - Position of Big Table: 1 Select Operator expressions: expr: _col0 @@ -597,46 +586,37 @@ type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-35-18_368_6277373757642429866/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-35-18_368_6277373757642429866/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 564 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 - transient_lastDdlTime 1297330506 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_08-01-24_769_3828540946798609217/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_08-01-24_769_3828540946798609217/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 564 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11067 + transient_lastDdlTime 1300374054 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy) @@ -114,46 +114,41 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-38-44_469_6200016761088760446/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_01-38-44_469_6200016761088760446/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297330724 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_08-10-33_158_849824621459487084/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_08-10-33_158_849824621459487084/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300374633 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/case_sensitivity.q.out =================================================================== --- ql/src/test/results/clientpositive/case_sensitivity.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/case_sensitivity.q.out (working copy) @@ -33,25 +33,21 @@ predicate: expr: (lint[0] > 0) type: boolean - Filter Operator - predicate: - expr: (lint[0] > 0) - type: boolean - Select Operator - expressions: - expr: lint[1] - type: int - expr: lintstring[0].MYSTRING - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: lint[1] + type: int + expr: lintstring[0].MYSTRING + type: string + outputColumnNames: _col0, _col1 + 
File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/cast1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/cast1.q.out (working copy) @@ -31,35 +31,31 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: (3 + 2) - type: int - expr: (3.0 + 2) - type: double - expr: (3 + 2.0) - type: double - expr: (3.0 + 2.0) - type: double - expr: ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0))) - type: int - expr: UDFToBoolean(1) - type: boolean - expr: UDFToInteger(true) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: (3 + 2) + type: int + expr: (3.0 + 2) + type: double + expr: (3 + 2.0) + type: double + expr: (3.0 + 2.0) + type: double + expr: ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0))) + type: int + expr: UDFToBoolean(1) + type: boolean + expr: UDFToInteger(true) + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/cluster.q.out =================================================================== --- ql/src/test/results/clientpositive/cluster.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/cluster.q.out (working copy) @@ -22,31 +22,27 @@ predicate: expr: (key = 10) type: boolean - Filter Operator - predicate: - expr: (key = 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -94,31 +90,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key 
expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -166,31 +158,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -238,31 +226,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -310,31 +294,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -382,31 +362,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + 
+ Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -454,31 +430,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -640,25 +612,21 @@ 1 {VALUE._col0} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -768,27 +736,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -900,27 +864,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: 
string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -1030,25 +990,21 @@ 1 {VALUE._col0} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -1129,39 +1085,35 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string null-subquery2:unioninput-subquery2:src TableScan alias: src @@ -1169,39 +1121,35 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 
+ value expressions: + expr: _col0 + type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/combine2.q.out =================================================================== --- ql/src/test/results/clientpositive/combine2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/combine2.q.out (working copy) @@ -86,28 +86,24 @@ combine2 TableScan alias: combine2 - Filter Operator - predicate: - expr: value is not null - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -193,24 +189,19 @@ TableScan alias: combine2 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: value is not null - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 [combine2] @@ -699,36 +690,32 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: ds + type: string + outputColumnNames: ds + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: ds type: string - outputColumnNames: ds - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: ds + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/create_view.q.out =================================================================== --- ql/src/test/results/clientpositive/create_view.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/create_view.q.out (working copy) @@ -194,23 +194,19 @@ expr: value type: string 
outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 = 18) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out =================================================================== --- ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (working copy) @@ -190,28 +190,24 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/filter_join_breaktask.q.out =================================================================== --- ql/src/test/results/clientpositive/filter_join_breaktask.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/filter_join_breaktask.q.out (working copy) @@ -48,28 +48,18 @@ predicate: expr: key is not null type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: key is not null - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 0 - value expressions: - expr: key - type: int + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 0 + value expressions: + expr: key + type: int m TableScan alias: m @@ -79,25 +69,20 @@ predicate: expr: (value is not null and (value <> '')) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 1 - value expressions: - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 1 + value expressions: + expr: value + type: string + expr: ds + type: string Needs Tagging: true Path -> Alias: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [f, m] @@ -159,36 +144,21 @@ 1 {VALUE._col1} {VALUE._col2} handleSkewJoin: false outputColumnNames: _col0, _col6, _col7 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col7 = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: _col6 is not null - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 <> '') - type: boolean - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_01-47-50_156_524306712693849500/-mr-10002 - NumFilesPerFileSink: 1 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - properties: - columns _col0,_col6 - columns.types int,string - escape.delim \ - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_08-32-48_789_2608190679065378579/-mr-10002 + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col6,_col7 + columns.types int,string,string + escape.delim \ + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-2 Map Reduce @@ -210,23 +180,18 @@ TableScan alias: g GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: value - type: string - sort order: + - Map-reduce partition columns: - expr: value - type: string - tag: 1 - value expressions: - expr: value - type: string + Reduce Output Operator + key expressions: + expr: value + type: string + sort order: + + Map-reduce partition columns: + expr: value + type: string + tag: 1 + value expressions: + expr: value + type: string Needs Tagging: true Path -> Alias: file:/tmp/sdong/hive_2011-02-10_01-47-50_156_524306712693849500/-mr-10002 [$INTNAME] @@ -238,15 +203,15 @@ input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: - columns _col0,_col6 - columns.types int,string + columns _col0,_col6,_col7 + columns.types int,string,string escape.delim \ input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: - columns _col0,_col6 - columns.types int,string + columns _col0,_col6,_col7 + columns.types int,string,string escape.delim \ pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 Partition Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy) @@ -33,46 +33,41 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By 
Operator + aggregations: + expr: count(DISTINCT substr(value, 5)) + expr: sum(substr(value, 5)) + bucketGroup: false + keys: + expr: substr(key, 1, 1) type: string - expr: value + expr: substr(value, 5) type: string - outputColumnNames: key, value - Group By Operator - aggregations: - expr: count(DISTINCT substr(value, 5)) - expr: sum(substr(value, 5)) - bucketGroup: false - keys: - expr: substr(key, 1, 1) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: substr(value, 5) + expr: _col1 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint - expr: _col3 - type: double + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint + expr: _col3 + type: double Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src] Index: ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (working copy) @@ -33,56 +33,51 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: count(DISTINCT substr(value, 5)) + expr: sum(substr(value, 5)) + expr: sum(DISTINCT substr(value, 5)) + expr: count(DISTINCT value) + bucketGroup: false + keys: + expr: substr(key, 1, 1) type: string + expr: substr(value, 5) + type: string expr: value type: string - outputColumnNames: key, value - Group By Operator - aggregations: - expr: count(DISTINCT substr(value, 5)) - expr: sum(substr(value, 5)) - expr: sum(DISTINCT substr(value, 5)) - expr: count(DISTINCT value) - bucketGroup: false - keys: - expr: substr(key, 1, 1) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: substr(value, 5) + expr: _col1 type: string - expr: value + expr: _col2 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - sort order: +++ - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col3 - type: bigint - expr: _col4 - type: double - expr: _col5 - type: double - expr: _col6 - type: bigint + sort order: +++ + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col3 + type: bigint + expr: _col4 + type: double + expr: _col5 + type: double + expr: _col6 + type: bigint Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src] Index: 
ql/src/test/results/clientpositive/groupby_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/groupby_ppr.q.out (working copy) @@ -33,29 +33,24 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Reduce Output Operator + key expressions: + expr: substr(key, 1, 1) type: string - expr: value + expr: substr(value, 5) type: string - outputColumnNames: key, value - Reduce Output Operator - key expressions: - expr: substr(key, 1, 1) - type: string - expr: substr(value, 5) - type: string - sort order: ++ - Map-reduce partition columns: - expr: substr(key, 1, 1) - type: string - tag: -1 + sort order: ++ + Map-reduce partition columns: + expr: substr(key, 1, 1) + type: string + tag: -1 Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src] Index: ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (working copy) @@ -33,31 +33,26 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Reduce Output Operator + key expressions: + expr: substr(key, 1, 1) type: string + expr: substr(value, 5) + type: string expr: value type: string - outputColumnNames: key, value - Reduce Output Operator - key expressions: - expr: substr(key, 1, 1) - type: string - expr: substr(value, 5) - type: string - expr: value - type: string - sort order: +++ - Map-reduce partition columns: - expr: substr(key, 1, 1) - type: string - tag: -1 + sort order: +++ + Map-reduce partition columns: + expr: substr(key, 1, 1) + type: string + tag: -1 Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src] Index: ql/src/test/results/clientpositive/having.q.out =================================================================== --- ql/src/test/results/clientpositive/having.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/having.q.out (working copy) @@ -153,23 +153,19 @@ type: string mode: mergepartial outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 <> 302) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -798,38 +794,34 @@ predicate: expr: (key > 300) type: boolean - Filter Operator - predicate: - expr: (key > 300) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: max(value) + bucketGroup: false + keys: expr: key type: string - expr: value - type: string - outputColumnNames: key, value - Group By Operator - aggregations: - expr: max(value) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: string Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/implicit_cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/implicit_cast1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/implicit_cast1.q.out (working copy) @@ -31,23 +31,19 @@ predicate: expr: (a <> 0) type: boolean - Filter Operator - predicate: - expr: (a <> 0) - type: boolean - Select Operator - expressions: - expr: a - type: bigint - expr: b - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: a + type: bigint + expr: b + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/input11.q.out =================================================================== --- ql/src/test/results/clientpositive/input11.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input11.q.out (working copy) @@ -33,32 +33,28 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input11_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input11_limit.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input11_limit.q.out (working copy) @@ -30,26 +30,22 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Limit + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract Limit Index: ql/src/test/results/clientpositive/input14.q.out =================================================================== --- ql/src/test/results/clientpositive/input14.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input14.q.out (working copy) @@ -48,48 +48,48 @@ output info: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/input18.q.out =================================================================== --- ql/src/test/results/clientpositive/input18.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input18.q.out (working copy) 
@@ -52,48 +52,48 @@ output info: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: regexp_replace(_col1, ' ', '+') + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: regexp_replace(_col1, ' ', '+') - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/input23.q.out =================================================================== --- ql/src/test/results/clientpositive/input23.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input23.q.out (working copy) @@ -106,47 +106,42 @@ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7, _col8, _col9 - Filter Operator - isSamplingPred: false - predicate: - expr: ((((_col2 = '2008-04-08') and (_col3 = '11')) and (_col8 = '2008-04-08')) and (_col9 = '14')) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col6 - type: string - expr: _col7 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_15-54-28_256_717236059530733661/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_15-54-28_256_717236059530733661/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 - columns.types string:string:string:string:string:string:string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: 
_col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_09-04-29_865_6849719999466698028/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_09-04-29_865_6849719999466698028/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 + columns.types string:string:string:string:string:string:string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/input24.q.out =================================================================== --- ql/src/test/results/clientpositive/input24.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input24.q.out (working copy) @@ -30,23 +30,19 @@ x TableScan alias: x - Filter Operator - predicate: - expr: (d = '2009-01-01') - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/input25.q.out =================================================================== --- ql/src/test/results/clientpositive/input25.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input25.q.out (working copy) @@ -47,30 +47,26 @@ null-subquery1:subq-subquery1:x TableScan alias: x - Filter Operator - predicate: - expr: (d = '2009-01-01') - type: boolean - Select Operator - expressions: - expr: a - type: int - expr: b - type: int - expr: d - type: string - outputColumnNames: _col0, _col1, _col2 - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: string + Select Operator + expressions: + expr: a + type: int + expr: b + type: int + expr: d + type: string + outputColumnNames: _col0, _col1, _col2 + Limit + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: int + expr: _col2 + type: string Reduce Operator Tree: Extract Limit @@ -125,30 +121,26 @@ null-subquery2:subq-subquery2:x TableScan alias: x - Filter Operator - predicate: - expr: (d = '2009-02-02') - type: boolean - Select Operator - expressions: - expr: a - type: int - expr: b - type: int - expr: d - type: string - outputColumnNames: _col0, _col1, _col2 - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: string + Select Operator + expressions: + expr: a + type: int + expr: b + type: 
int + expr: d + type: string + outputColumnNames: _col0, _col1, _col2 + Limit + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: int + expr: _col2 + type: string Reduce Operator Tree: Extract Limit Index: ql/src/test/results/clientpositive/input26.q.out =================================================================== --- ql/src/test/results/clientpositive/input26.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input26.q.out (working copy) @@ -28,36 +28,32 @@ null-subquery1:subq-subquery1:a TableScan alias: a - Filter Operator - predicate: - expr: ((ds = '2008-04-08') and (hr = '11')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - expr: ds + expr: _col1 type: string - expr: hr + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col3 + type: string Reduce Operator Tree: Extract Limit @@ -120,34 +116,30 @@ predicate: expr: ((ds = '2008-04-08') and (hr = '14')) type: boolean - Filter Operator - predicate: - expr: ((ds = '2008-04-08') and (hr = '14')) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Reduce Operator Tree: Extract Limit Index: ql/src/test/results/clientpositive/input2_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input2_limit.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input2_limit.q.out (working copy) @@ -22,24 +22,20 @@ predicate: expr: (key < 300) type: boolean - Filter Operator - predicate: - expr: (key < 300) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/input31.q.out =================================================================== --- ql/src/test/results/clientpositive/input31.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input31.q.out (working copy) @@ -35,23 +35,19 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/input39.q.out =================================================================== --- ql/src/test/results/clientpositive/input39.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input39.q.out (working copy) @@ -121,23 +121,19 @@ 1 {VALUE._col2} handleSkewJoin: false outputColumnNames: _col2, _col7 - Filter Operator - predicate: - expr: ((_col2 = '1') and (_col7 = '1')) - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/input42.q.out =================================================================== --- ql/src/test/results/clientpositive/input42.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input42.q.out (working copy) @@ -19,39 +19,34 @@ TableScan alias: a GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col3 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 type: string - expr: hr + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col3 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] @@ -1199,39 +1194,34 @@ predicate: expr: (key < 200) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (key < 200)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col3 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 type: string - expr: hr + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col3 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] Index: ql/src/test/results/clientpositive/input6.q.out =================================================================== --- ql/src/test/results/clientpositive/input6.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input6.q.out (working copy) @@ -33,25 +33,21 @@ predicate: expr: key is null type: boolean - Filter Operator - predicate: - expr: key is null - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input9.q.out =================================================================== --- ql/src/test/results/clientpositive/input9.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input9.q.out (working copy) @@ -33,32 +33,28 @@ predicate: expr: (null = null) type: boolean - Filter Operator - predicate: - expr: (null = null) - type: boolean + Select Operator + expressions: + expr: null + type: string + expr: key + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: null - type: string - expr: key - type: string + expr: _col0 + type: void + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: void - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -35,59 +35,54 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((key < 100) and (ds = '2008-04-08')) and (hr = '12')) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value + expr: _col2 type: string - expr: hr + expr: _col3 type: string - expr: ds - type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_15-59-53_966_4370880936716856186/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_15-59-53_966_4370880936716856186/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,hr,ds - columns.types int:string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382393 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-16-00_167_373354768081382011/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-16-00_167_373354768081382011/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,hr,ds + columns.types int:string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300378560 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart] Index: ql/src/test/results/clientpositive/input_part5.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part5.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input_part5.q.out (working copy) @@ -33,29 +33,25 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: ((ds = '2008-04-08') and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input_part6.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part6.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input_part6.q.out (working copy) @@ -22,28 +22,24 @@ predicate: expr: (ds = ((2008 - 4) - 8)) type: boolean - Filter Operator - predicate: - expr: (ds = ((2008 - 4) - 8)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/input_part7.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part7.q.out (revision 
1083142) +++ ql/src/test/results/clientpositive/input_part7.q.out (working copy) @@ -34,25 +34,31 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: expr: _col0 type: string expr: _col1 @@ -61,28 +67,17 @@ type: string expr: _col3 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: ++++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + sort order: ++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string null-subquery2:a-subquery2:y TableScan alias: y @@ -92,25 +87,31 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: expr: _col0 type: string expr: _col1 @@ -119,28 +120,17 @@ type: string expr: _col3 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: ++++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + sort order: ++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [null-subquery1:a-subquery1:x, null-subquery2:a-subquery2:y] Index: ql/src/test/results/clientpositive/input_part9.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part9.q.out (revision 1083142) +++ 
ql/src/test/results/clientpositive/input_part9.q.out (working copy) @@ -24,39 +24,34 @@ predicate: expr: key is not null type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key is not null and (ds = '2008-04-08')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col3 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 type: string - expr: hr + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col3 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [x] Index: ql/src/test/results/clientpositive/input_testxpath2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input_testxpath2.q.out (working copy) @@ -33,27 +33,23 @@ predicate: expr: (lint is not null and (not mstringstring is null)) type: boolean - Filter Operator - predicate: - expr: (lint is not null and (not mstringstring is null)) - type: boolean - Select Operator - expressions: - expr: size(lint) - type: int - expr: size(lintstring) - type: int - expr: size(mstringstring) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: size(lint) + type: int + expr: size(lintstring) + type: int + expr: size(mstringstring) + type: int + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input_testxpath4.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath4.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/input_testxpath4.q.out (working copy) @@ -107,23 +107,19 @@ predicate: expr: ((mstringstring['key_9'] is not null and lintstring.myint is not null) and lintstring is not null) type: boolean - Filter Operator - predicate: - expr: ((mstringstring['key_9'] is not null and lintstring.myint is not null) and lintstring is not null) - type: boolean - Select Operator - expressions: - expr: mstringstring['key_9'] - type: string - expr: lintstring.myint - type: array<int> - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: mstringstring['key_9'] + type: string + expr: lintstring.myint + type: array<int> + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/join0.q.out =================================================================== --- ql/src/test/results/clientpositive/join0.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join0.q.out (working copy) @@ -33,25 +33,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: 0 + value expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string src2:src TableScan alias: src @@ -59,25 +55,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: 1 + value expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join11.q.out =================================================================== --- ql/src/test/results/clientpositive/join11.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join11.q.out (working copy) @@ -37,22 +37,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src Index: ql/src/test/results/clientpositive/join12.q.out =================================================================== --- ql/src/test/results/clientpositive/join12.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join12.q.out (working copy) @@ -43,22 +43,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: 
string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src @@ -93,19 +89,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join13.q.out =================================================================== --- ql/src/test/results/clientpositive/join13.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join13.q.out (working copy) @@ -44,22 +44,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src @@ -130,19 +126,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - Reduce Output Operator - key expressions: - expr: UDFToDouble(_col0) - type: double - sort order: + - Map-reduce partition columns: - expr: UDFToDouble(_col0) - type: double - tag: 1 + Reduce Output Operator + key expressions: + expr: UDFToDouble(_col0) + type: double + sort order: + + Map-reduce partition columns: + expr: UDFToDouble(_col0) + type: double + tag: 1 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join14.q.out =================================================================== --- ql/src/test/results/clientpositive/join14.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join14.q.out (working copy) @@ -30,29 +30,6 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - srcpart - TableScan - alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean Reduce Output Operator key expressions: expr: key @@ -61,10 +38,25 @@ Map-reduce partition columns: expr: key type: string - tag: 1 + tag: 0 value expressions: - expr: value + expr: key type: string + srcpart + TableScan + alias: srcpart + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: value + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join16.q.out =================================================================== --- ql/src/test/results/clientpositive/join16.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join16.q.out (working copy) @@ 
-20,37 +20,29 @@ predicate: expr: ((key > 10) and (key > 20)) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 > 20) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string tab TableScan alias: tab @@ -83,23 +75,19 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col3 - Filter Operator - predicate: - expr: (_col3 < 200) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/join19.q.out =================================================================== --- ql/src/test/results/clientpositive/join19.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join19.q.out (working copy) @@ -135,27 +135,23 @@ predicate: expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Citation')) type: boolean - Filter Operator - predicate: - expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Citation')) - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string t22:t2 TableScan alias: t2 @@ -163,29 +159,25 @@ predicate: expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') type: boolean - Filter Operator - predicate: - expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + expr: object + type: string + outputColumnNames: _col0, _col1 + 
Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: object + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col1 + type: string t33:t3 TableScan alias: t3 @@ -193,29 +185,25 @@ predicate: expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_from') type: boolean - Filter Operator - predicate: - expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_from') - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + expr: object + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 type: string - expr: object + sort order: + + Map-reduce partition columns: + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: 2 - value expressions: - expr: _col0 - type: string + tag: 2 + value expressions: + expr: _col0 + type: string Reduce Operator Tree: Join Operator condition map: @@ -261,24 +249,20 @@ predicate: expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Author')) type: boolean - Filter Operator - predicate: - expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Author')) - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 t55:t5 TableScan alias: t5 @@ -286,29 +270,25 @@ predicate: expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_to') type: boolean - Filter Operator - predicate: - expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_to') - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + expr: object + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: object + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 - value expressions: - expr: _col1 - type: string + tag: 2 + value expressions: + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -356,29 +336,25 @@ predicate: expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') type: boolean - Filter Operator - predicate: - expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') - type: boolean - Select Operator - 
expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + expr: object + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: object + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join20.q.out =================================================================== --- ql/src/test/results/clientpositive/join20.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join20.q.out (working copy) @@ -25,24 +25,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -746,24 +742,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -771,24 +763,20 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 Index: ql/src/test/results/clientpositive/join21.q.out =================================================================== --- ql/src/test/results/clientpositive/join21.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join21.q.out (working copy) @@ -40,24 +40,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: 
string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 Index: ql/src/test/results/clientpositive/join23.q.out =================================================================== --- ql/src/test/results/clientpositive/join23.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join23.q.out (working copy) @@ -55,27 +55,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- ql/src/test/results/clientpositive/join26.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -76,75 +76,65 @@ TableScan alias: z GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (hr = 11) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 0 to 2 - condition expressions: - 0 {key} - 1 {value} - 2 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col5, _col9 - Position of Big Table: 2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col5 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col5, _col9 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-03-56_680_8440893894140638044/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-03-56_680_8440893894140638044/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382636 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {key} + 1 {value} + 2 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col5, _col9 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col5, _col9 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col9 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-25-32_631_7413274741403705596/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-25-32_631_7413274741403705596/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300379132 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/join28.q.out =================================================================== --- ql/src/test/results/clientpositive/join28.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join28.q.out (working copy) @@ -57,23 +57,15 @@ z TableScan alias: z - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - predicate: - expr: (hr = 11) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -139,25 +139,15 @@ TableScan alias: z GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (hr = 11) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col5} {_col0} - 1 {value} - handleSkewJoin: false - 
keys: - 0 [Column[_col1]] - 1 [Column[value]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col5} {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col1]] + 1 [Column[value]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -155,28 +155,18 @@ TableScan alias: z GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (hr = 11) - type: boolean - Reduce Output Operator - key expressions: - expr: value - type: string - sort order: + - Map-reduce partition columns: - expr: value - type: string - tag: 1 - value expressions: - expr: value - type: string + Reduce Output Operator + key expressions: + expr: value + type: string + sort order: + + Map-reduce partition columns: + expr: value + type: string + tag: 1 + value expressions: + expr: value + type: string Needs Tagging: true Path -> Alias: file:/tmp/sdong/hive_2011-02-10_16-05-42_624_7730493356150230026/-mr-10002 [file:/tmp/sdong/hive_2011-02-10_16-05-42_624_7730493356150230026/-mr-10002] Index: ql/src/test/results/clientpositive/join34.q.out =================================================================== --- ql/src/test/results/clientpositive/join34.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -69,75 +69,70 @@ predicate: expr: (key < 20) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key < 20) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col1} - 1 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col1} + 1 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + outputColumnNames: _col1, _col2, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col1, _col2, _col3 - Position of Big Table: 0 Select Operator expressions: - expr: _col1 - type: string expr: _col2 type: string expr: _col3 type: string - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-05-57_676_6075966104051319240/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-05-57_676_6075966104051319240/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count 
-1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382757 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + expr: _col1 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-30-27_511_6544642459407208280/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-30-27_511_6544642459407208280/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300379427 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false null-subquery2:subq1-subquery2:x1 TableScan alias: x1 @@ -147,75 +142,70 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col1} - 1 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col1} + 1 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + outputColumnNames: _col1, _col2, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col1, _col2, _col3 - Position of Big Table: 0 Select Operator expressions: - expr: _col1 - type: string expr: _col2 type: string expr: _col3 type: string - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-05-57_676_6075966104051319240/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-05-57_676_6075966104051319240/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382757 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + expr: _col1 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-30-27_511_6544642459407208280/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-30-27_511_6544642459407208280/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300379427 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- ql/src/test/results/clientpositive/join35.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -50,37 +50,32 @@ predicate: expr: (key < 20) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key < 20) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: 
_col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x] @@ -472,37 +467,32 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1] Index: ql/src/test/results/clientpositive/join38.q.out =================================================================== --- ql/src/test/results/clientpositive/join38.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join38.q.out (working copy) @@ -143,44 +143,40 @@ expr: _col15 type: string outputColumnNames: _col1, _col9, _col15 - Filter Operator - predicate: - expr: (_col15 = 111) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: _col1 + type: string + expr: _col9 + type: string + outputColumnNames: _col1, _col9 + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col1 type: string expr: _col9 type: string - outputColumnNames: _col1, _col9 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string expr: _col1 type: string - expr: _col9 + sort order: ++ + Map-reduce partition columns: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/join39.q.out =================================================================== --- ql/src/test/results/clientpositive/join39.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join39.q.out (working copy) @@ -40,26 +40,22 @@ predicate: expr: (key <= 100) type: boolean - Filter Operator - predicate: - expr: (key <= 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - 
outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[_col0]] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/join4.q.out =================================================================== --- ql/src/test/results/clientpositive/join4.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join4.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join40.q.out =================================================================== --- ql/src/test/results/clientpositive/join40.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join40.q.out (working copy) @@ -39,31 +39,27 @@ predicate: expr: (key <= 100) type: boolean - Filter Operator - predicate: - expr: (key <= 100) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: 
string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -1812,24 +1808,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -2533,24 +2525,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -2558,24 +2546,20 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -3266,26 +3250,22 @@ predicate: expr: (key <= 100) type: boolean - Filter Operator - predicate: - expr: (key <= 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[_col0]] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/join5.q.out =================================================================== --- ql/src/test/results/clientpositive/join5.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join5.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: 
_col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join6.q.out =================================================================== --- ql/src/test/results/clientpositive/join6.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join6.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join7.q.out =================================================================== --- ql/src/test/results/clientpositive/join7.q.out (revision 1083142) +++ 
ql/src/test/results/clientpositive/join7.q.out (working copy) @@ -62,31 +62,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -94,31 +90,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:c:src3 TableScan alias: src3 @@ -126,31 +118,27 @@ predicate: expr: ((key > 20) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 20) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 2 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join8.q.out =================================================================== --- ql/src/test/results/clientpositive/join8.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join8.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - 
expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -118,21 +110,21 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Filter Operator - predicate: - expr: (_col2 is null and _col0 is not null) - type: boolean + Filter Operator + predicate: + expr: (_col2 is null and _col0 is not null) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: expr: _col0 Index: ql/src/test/results/clientpositive/join9.q.out =================================================================== --- ql/src/test/results/clientpositive/join9.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join9.q.out (working copy) @@ -150,51 +150,46 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col2, _col3, _col7 - Filter Operator - isSamplingPred: false - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-08-44_532_2940878602076923711/-ext-10000 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-08-44_532_2940878602076923711/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - 
file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382924 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-37-30_985_6005635493455371640/-ext-10000 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-37-30_985_6005635493455371640/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300379850 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy) @@ -106,46 +106,41 @@ expr: _col11 type: string outputColumnNames: _col0, _col5, _col9, _col10, _col11 - Filter Operator - isSamplingPred: false - predicate: - expr: ((_col10 = '2008-04-08') and (_col11 = 11)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-22-31_310_4336533938712499116/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-22-31_310_4336533938712499116/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297383751 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: 
default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: _col9 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-18_04-32-20_912_5170652234039967837/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-18_04-32-20_912_5170652234039967837/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300447940 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -598,50 +593,45 @@ expr: _col11 type: string outputColumnNames: _col0, _col5, _col9, _col10, _col11 - Filter Operator - isSamplingPred: false - predicate: - expr: ((_col10 = '2008-04-08') and (_col11 = 11)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-22-51_531_5040920332969210429/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-22-51_531_5040920332969210429/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - numFiles 1 - numPartitions 0 - numRows 107 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2125 - transient_lastDdlTime 1297383758 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: _col9 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-18_04-33-14_273_1487553032904896430/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-18_04-33-14_273_1487553032904896430/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + numFiles 1 + numPartitions 0 + numRows 107 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2125 + transient_lastDdlTime 1300447960 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -814,7 +804,6 @@ name: default.dest_j1 name: default.dest_j1 - PREHOOK: query: INSERT OVERWRITE TABLE dest_j1 SELECT /*+ MAPJOIN(x,y) */ x.key, z.value, y.value FROM src1_copy x JOIN src_copy y ON (x.key = y.key) Index: ql/src/test/results/clientpositive/lateral_view_ppd.q.out =================================================================== --- ql/src/test/results/clientpositive/lateral_view_ppd.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/lateral_view_ppd.q.out (working copy) @@ -34,23 +34,19 @@ expr: _col2 type: int outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -69,23 +65,19 @@ expr: _col2 type: int outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -136,19 +128,19 @@ SELECT * : (no compute) Lateral View Join Operator outputColumnNames: 
_col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: - expr: ((_col0 = '0') and (_col2 = 1)) - type: boolean + Filter Operator + predicate: + expr: ((_col0 = '0') and (_col2 = 1)) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1, _col2 Select Operator expressions: expr: _col1 @@ -171,19 +163,19 @@ function name: explode Lateral View Join Operator outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: - expr: ((_col0 = '0') and (_col2 = 1)) - type: boolean + Filter Operator + predicate: + expr: ((_col0 = '0') and (_col2 = 1)) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1, _col2 Select Operator expressions: expr: _col1 @@ -248,24 +240,20 @@ expr: _col3 type: string outputColumnNames: _col1, _col4, _col2, _col3 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col4 - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col4 + type: int + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -286,24 +274,20 @@ expr: _col3 type: string outputColumnNames: _col1, _col4, _col2, _col3 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col4 - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col4 + type: int + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -371,23 +355,19 @@ expr: _col0 type: string outputColumnNames: _col1, _col2, _col0 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + 
type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -406,23 +386,19 @@ expr: _col0 type: string outputColumnNames: _col1, _col2, _col0 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -446,23 +422,19 @@ expr: _col0 type: string outputColumnNames: _col1, _col2, _col0 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -481,23 +453,19 @@ expr: _col0 type: string outputColumnNames: _col1, _col2, _col0 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/load_dyn_part10.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part10.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/load_dyn_part10.q.out (working copy) @@ -44,27 +44,23 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds > '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part10 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part10 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/load_dyn_part13.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part13.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/load_dyn_part13.q.out (working copy) @@ -62,37 +62,33 @@ predicate: expr: (key < 20) type: boolean - Filter Operator - predicate: - expr: (key < 20) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: '22' - type: string - outputColumnNames: _col0, _col1, _col2 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part13 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: '22' + type: string + outputColumnNames: _col0, _col1, _col2 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part13 null-subquery2:s-subquery2:src TableScan alias: src @@ -100,37 +96,33 @@ predicate: expr: ((key > 20) and (key < 40)) type: boolean - Filter Operator - predicate: - expr: ((key > 20) and (key < 40)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: '33' - type: string - outputColumnNames: _col0, _col1, _col2 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part13 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: '33' + type: string + outputColumnNames: _col0, _col1, _col2 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part13 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/load_dyn_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/load_dyn_part2.q.out (working copy) @@ -38,32 +38,28 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds is not null and hr is not null) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + sort order: + Map-reduce partition columns: + expr: _col0 type: string - expr: value + tag: -1 + value expressions: + expr: _col0 type: string - expr: hr + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - sort order: - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/load_dyn_part3.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/load_dyn_part3.q.out (working copy) @@ -42,29 +42,25 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds is not null and hr is not null) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part3 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part3 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/load_dyn_part4.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part4.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/load_dyn_part4.q.out (working copy) @@ -54,29 +54,25 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds is not null and hr is not null) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part4 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part4 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/load_dyn_part9.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part9.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/load_dyn_part9.q.out (working copy) @@ -44,29 +44,25 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds <= '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part9 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part9 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy) @@ -54,25 +54,20 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [a] @@ -209,7 +204,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) + expr: ((_col4 > 15) and (_col4 < 25)) type: boolean Select Operator expressions: @@ -575,7 +570,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col6 > 15)) and (_col6 < 25)) + expr: ((_col6 > 15) and 
(_col6 < 25)) type: boolean Select Operator expressions: @@ -942,7 +937,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) and (_col6 = '2008-04-08')) + expr: (((_col4 > 15) and (_col4 < 25)) and (_col6 = '2008-04-08')) type: boolean Select Operator expressions: @@ -1225,7 +1220,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col6 > 15)) and (_col6 < 25)) and (_col2 = '2008-04-08')) + expr: ((_col6 > 15) and (_col6 < 25)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/mapjoin_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin_distinct.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/mapjoin_distinct.q.out (working copy) @@ -31,19 +31,15 @@ d TableScan alias: d - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -51,28 +47,24 @@ c TableScan alias: c - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col1 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col1 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -217,19 +209,15 @@ d TableScan alias: d - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -237,28 +225,24 @@ c TableScan alias: c - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col1 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 
[Column[key]] + outputColumnNames: _col1 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -376,19 +360,15 @@ d TableScan alias: d - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -396,28 +376,24 @@ c TableScan alias: c - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col1 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col1 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -555,19 +531,15 @@ d TableScan alias: d - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -575,28 +547,24 @@ c TableScan alias: c - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col1 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col1 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work Index: ql/src/test/results/clientpositive/mapjoin_subquery.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin_subquery.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/mapjoin_subquery.q.out 
(working copy) @@ -46,23 +46,15 @@ z TableScan alias: z - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - predicate: - expr: (hr = 11) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -310,23 +302,15 @@ z TableScan alias: z - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - predicate: - expr: (hr = 11) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/merge3.q.out =================================================================== --- ql/src/test/results/clientpositive/merge3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/merge3.q.out (working copy) @@ -2268,47 +2268,42 @@ TableScan alias: merge_src_part GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-41-50_240_5312287851356242830/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-41-50_240_5312287851356242830/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/merge_src_part2 - name default.merge_src_part2 - partition_columns ds - serialization.ddl struct merge_src_part2 { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297384910 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_src_part2 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-18_05-22-26_995_1205514312202255668/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-18_05-22-26_995_1205514312202255668/-ext-10000/ + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/merge_src_part2 + name default.merge_src_part2 + partition_columns ds + serialization.ddl struct merge_src_part2 { string key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300450946 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_src_part2 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/merge_src_part/ds=2008-04-08 [merge_src_part] @@ -4647,33 +4642,28 @@ TableScan alias: merge_src_part GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + sort order: + Map-reduce partition columns: + expr: _col2 type: string - expr: value + tag: -1 + value expressions: + expr: _col0 type: string - expr: ds + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - sort order: - Map-reduce partition columns: - expr: _col2 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/merge_src_part/ds=2008-04-08 [s:merge_src_part] @@ -4928,6 +4918,7 @@ name: default.merge_src_part2 name: default.merge_src_part2 + PREHOOK: query: from (select * from merge_src_part where ds is not null distribute by ds) s insert overwrite table merge_src_part2 partition(ds) select key, value, ds Index: ql/src/test/results/clientpositive/merge4.q.out =================================================================== --- ql/src/test/results/clientpositive/merge4.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/merge4.q.out (working copy) @@ -27,27 +27,23 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: 
default.nzhang_part Stage: Stage-5 Conditional Operator @@ -1145,25 +1141,21 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part Stage: Stage-5 Conditional Operator @@ -2776,25 +2768,21 @@ null-subquery1:s-subquery1:srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/merge_dynamic_partition.q.out =================================================================== --- ql/src/test/results/clientpositive/merge_dynamic_partition.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/merge_dynamic_partition.q.out (working copy) @@ -49,27 +49,23 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-0 Move Operator @@ -655,25 +651,21 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - 
expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-5 Conditional Operator @@ -1289,29 +1281,25 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: ((ds = '2008-04-08') and (hr = 11)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out =================================================================== --- ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out (working copy) @@ -62,27 +62,23 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out 
=================================================================== --- ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out (working copy) @@ -100,29 +100,25 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: (ds >= '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/no_hooks.q.out =================================================================== --- ql/src/test/results/clientpositive/no_hooks.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/no_hooks.q.out (working copy) @@ -52,27 +52,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/noalias_subq1.q.out =================================================================== --- ql/src/test/results/clientpositive/noalias_subq1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/noalias_subq1.q.out (working copy) @@ -29,21 +29,17 @@ expr: key type: string outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col1 < 100) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + outputColumnNames: _col0 + File Output Operator + 
compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/notable_alias1.q.out =================================================================== --- ql/src/test/results/clientpositive/notable_alias1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/notable_alias1.q.out (working copy) @@ -30,36 +30,32 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/notable_alias2.q.out =================================================================== --- ql/src/test/results/clientpositive/notable_alias2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/notable_alias2.q.out (working copy) @@ -30,36 +30,32 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/nullgroup.q.out (working copy) @@ -22,23 +22,19 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: 
count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -96,23 +92,19 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -171,20 +163,16 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - Reduce Output Operator - sort order: - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 - value expressions: - expr: 1 - type: int + Select Operator + Reduce Output Operator + sort order: + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: @@ -266,17 +254,13 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: 1 - type: int + Select Operator + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup2.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/nullgroup2.q.out (working copy) @@ -23,36 +23,32 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -147,36 +143,32 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: 
count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -239,27 +231,23 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Reduce Output Operator + key expressions: expr: key type: string - outputColumnNames: key - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 - value expressions: - expr: 1 - type: int + sort order: + + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: @@ -354,27 +342,23 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Reduce Output Operator + key expressions: expr: key type: string - outputColumnNames: key - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: -1 - value expressions: - expr: 1 - type: int + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup4.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup4.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/nullgroup4.q.out (working copy) @@ -23,39 +23,35 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Group By Operator + aggregations: + expr: count(1) + expr: count(DISTINCT value) + bucketGroup: false + keys: expr: value type: string - outputColumnNames: value - Group By Operator - aggregations: - expr: count(1) - expr: count(DISTINCT value) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint - expr: _col2 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -143,36 +139,32 @@ 
predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Group By Operator + aggregations: + expr: count(1) + expr: count(DISTINCT value) + bucketGroup: false + keys: expr: value type: string - outputColumnNames: value - Group By Operator - aggregations: - expr: count(1) - expr: count(DISTINCT value) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col1 - type: bigint - expr: _col2 - type: bigint + sort order: + + tag: -1 + value expressions: + expr: _col1 + type: bigint + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -234,27 +226,23 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Reduce Output Operator + key expressions: expr: value type: string - outputColumnNames: value - Reduce Output Operator - key expressions: - expr: value - type: string - sort order: + - Map-reduce partition columns: - expr: value - type: string - tag: -1 - value expressions: - expr: 1 - type: int + sort order: + + Map-reduce partition columns: + expr: value + type: string + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: @@ -342,24 +330,20 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Reduce Output Operator + key expressions: expr: value type: string - outputColumnNames: value - Reduce Output Operator - key expressions: - expr: value - type: string - sort order: + - tag: -1 - value expressions: - expr: 1 - type: int + sort order: + + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out (working copy) @@ -22,43 +22,39 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Group By Operator + aggregations: + expr: count(1) + expr: count(DISTINCT value) + expr: count(DISTINCT substr(value, 5)) + bucketGroup: false + keys: expr: value type: string - outputColumnNames: value - Group By Operator - aggregations: - expr: count(1) - expr: count(DISTINCT value) - expr: count(DISTINCT substr(value, 5)) - bucketGroup: false - keys: - expr: value + expr: substr(value, 5) + type: string + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: substr(value, 5) + expr: 
_col1 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col2 - type: bigint - expr: _col3 - type: bigint - expr: _col4 - type: bigint + sort order: ++ + tag: -1 + value expressions: + expr: _col2 + type: bigint + expr: _col3 + type: bigint + expr: _col4 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -122,26 +118,22 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Reduce Output Operator + key expressions: expr: value type: string - outputColumnNames: value - Reduce Output Operator - key expressions: - expr: value - type: string - expr: substr(value, 5) - type: string - sort order: ++ - tag: -1 - value expressions: - expr: 1 - type: int + expr: substr(value, 5) + type: string + sort order: ++ + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup5.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup5.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/nullgroup5.q.out (working copy) @@ -52,38 +52,6 @@ predicate: expr: (ds = '2009-04-05') type: boolean - Filter Operator - predicate: - expr: (ds = '2009-04-05') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - null-subquery2:u-subquery2:y - TableScan - alias: y - Filter Operator - predicate: - expr: (ds = '2009-04-09') - type: boolean Select Operator expressions: expr: key @@ -105,6 +73,30 @@ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + null-subquery2:u-subquery2:y + TableScan + alias: y + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy) @@ -288,7 +288,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) + expr: (((_col4 > 15) and (_col4 < 25)) and ((_col0 > 10) and (_col0 < 20))) type: boolean Select Operator expressions: @@ -650,7 +650,7 @@ Filter Operator 
isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) and (_col6 = '2008-04-08')) + expr: ((((_col4 > 15) and (_col4 < 25)) and (_col6 = '2008-04-08')) and ((_col0 > 10) and (_col0 < 20))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/pcr.q.out =================================================================== --- ql/src/test/results/clientpositive/pcr.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/pcr.q.out (working copy) @@ -81,35 +81,30 @@ predicate: expr: (key < 5) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds <= '2000-04-09') and (key < 5)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -291,29 +286,24 @@ predicate: expr: ((ds <= '2000-04-09') or (key < 5)) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds <= '2000-04-09') or (key < 5)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -581,35 +571,30 @@ predicate: expr: ((key < 5) and (value <> 'val_2')) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds <= '2000-04-09') and (key < 5)) and (value <> 'val_2')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -793,35 +778,30 @@ predicate: expr: (((ds < '2000-04-09') and (key < 5)) or ((ds > '2000-04-09') and (value = 'val_5'))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds < '2000-04-09') and (key < 5)) or ((ds > '2000-04-09') and (value = 'val_5'))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1007,35 +987,30 @@ predicate: expr: (((ds < '2000-04-10') and (key < 5)) or ((ds > '2000-04-08') and (value = 'val_5'))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds < '2000-04-10') and (key < 5)) or ((ds > '2000-04-08') and (value = 'val_5'))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1280,35 +1255,30 @@ predicate: expr: (((ds < '2000-04-10') or (key < 5)) and ((ds > '2000-04-08') or (value = 'val_5'))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds < '2000-04-10') or (key < 5)) and ((ds > '2000-04-08') or (value = 'val_5'))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1561,31 +1531,26 @@ predicate: expr: (key = 14) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds = '2000-04-08') or (ds = '2000-04-09')) and (key = 14)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1752,31 +1717,26 @@ TableScan alias: pcr_t1 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2000-04-08') or (ds = '2000-04-09')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1983,31 +1943,26 @@ TableScan alias: pcr_t1 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds >= '2000-04-08') or (ds < '2000-04-10')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -2290,37 +2245,32 @@ predicate: expr: (((ds = '2000-04-08') and (key = 1)) or ((ds = '2000-04-09') and (key = 2))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds = '2000-04-08') and (key = 1)) or ((ds = '2000-04-09') and (key = 2))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - 
expr: ds + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + sort order: +++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -2489,52 +2439,42 @@ TableScan alias: t1 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2000-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 0 - value expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 0 + value expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string t2 TableScan alias: t2 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2000-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 1 - value expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 1 + value expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [t2, t1] @@ -2772,52 +2712,42 @@ TableScan alias: t1 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2000-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 0 - value expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 0 + value expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string t2 TableScan alias: t2 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2000-04-09') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 1 - value expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 1 + value expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [t1] @@ -3128,37 +3058,32 @@ predicate: expr: (((ds > '2000-04-08') and 
(ds < '2000-04-11')) or (key = 2)) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds > '2000-04-08') and (ds < '2000-04-11')) or (((ds >= '2000-04-08') and (ds <= '2000-04-11')) and (key = 2))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - expr: ds + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + sort order: +++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -3478,37 +3403,32 @@ predicate: expr: ((ds > '2000-04-08') or ((ds <= '2000-04-09') and (key = 2))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds > '2000-04-08') and (ds < '2000-04-11')) or ((ds <= '2000-04-09') and (key = 2))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - expr: ds + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + sort order: +++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -4671,29 +4591,24 @@ TableScan alias: srcpart GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (hr = 11)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] @@ -4826,41 +4741,36 @@ predicate: expr: (key = 11) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds = '2008-04-08') and ((hr = '11') or (hr = '12'))) and (key = 11)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + 
expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col2 type: string - expr: ds + expr: _col3 type: string - expr: hr + sort order: +++ + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] @@ -5040,41 +4950,36 @@ predicate: expr: (key = 11) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((hr = '11') and (key = 11)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col2 type: string - expr: ds + expr: _col3 type: string - expr: hr + sort order: +++ + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] Index: ql/src/test/results/clientpositive/ppd1.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd1.q.out (working copy) @@ -22,21 +22,17 @@ predicate: expr: (key > '2') type: boolean - Filter Operator - predicate: - expr: (key > '2') - type: boolean - Select Operator - expressions: - expr: key - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/ppd_clusterby.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_clusterby.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_clusterby.q.out (working copy) @@ -22,31 +22,27 @@ predicate: expr: (key = 10) type: boolean - 
Filter Operator - predicate: - expr: (key = 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -133,25 +129,21 @@ 1 {VALUE._col0} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/ppd_gby.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_gby.q.out (working copy) @@ -28,38 +28,34 @@ predicate: expr: ((value > 'val_10') and (value > 'val_200')) type: boolean - Filter Operator - predicate: - expr: (value > 'val_10') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + outputColumnNames: value, key + Group By Operator + aggregations: + expr: count(key) + bucketGroup: false + keys: expr: value type: string - expr: key - type: string - outputColumnNames: value, key - Group By Operator - aggregations: - expr: count(key) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -70,17 +66,17 @@ type: string mode: mergepartial outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: ((_col0 > 'val_200') and ((_col1 > 30) or (_col0 < 'val_400'))) - type: boolean + Filter Operator + predicate: + expr: ((_col0 > 'val_200') and 
((_col1 > 30) or (_col0 < 'val_400'))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 Select Operator expressions: expr: _col0 Index: ql/src/test/results/clientpositive/ppd_gby2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_gby2.q.out (working copy) @@ -31,38 +31,34 @@ predicate: expr: ((value > 'val_10') and (value > 'val_200')) type: boolean - Filter Operator - predicate: - expr: (value > 'val_10') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + outputColumnNames: value, key + Group By Operator + aggregations: + expr: count(key) + bucketGroup: false + keys: expr: value type: string - expr: key - type: string - outputColumnNames: value, key - Group By Operator - aggregations: - expr: count(key) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -73,17 +69,17 @@ type: string mode: mergepartial outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: ((_col0 > 'val_200') and ((_col1 > 30) or (_col0 < 'val_400'))) - type: boolean + Filter Operator + predicate: + expr: ((_col0 > 'val_200') and ((_col1 > 30) or (_col0 < 'val_400'))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 Select Operator expressions: expr: _col0 Index: ql/src/test/results/clientpositive/ppd_gby_join.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby_join.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_gby_join.q.out (working copy) @@ -37,35 +37,27 @@ predicate: expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) type: boolean - Filter Operator - predicate: - expr: (key > '1') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src2:src TableScan alias: src @@ -73,27 +65,23 
@@ predicate: expr: ((key > '2') and (key <> '4')) type: boolean - Filter Operator - predicate: - expr: (key > '2') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/ppd_join.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_join.q.out (working copy) @@ -34,35 +34,27 @@ predicate: expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) type: boolean - Filter Operator - predicate: - expr: (key > '1') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src2:src TableScan alias: src @@ -70,31 +62,27 @@ predicate: expr: ((key > '2') and (key <> '4')) type: boolean - Filter Operator - predicate: - expr: (key > '2') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/ppd_join2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_join2.q.out (working copy) @@ -41,35 +41,27 @@ predicate: expr: (((key <> '302') and (key < '400')) and ((key <> '311') and ((value <> 'val_50') or (key > '1')))) type: boolean - Filter Operator - predicate: - expr: (key <> '302') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + 
outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src2:src TableScan alias: src @@ -77,31 +69,27 @@ predicate: expr: ((key <> '305') and (key <> '14')) type: boolean - Filter Operator - predicate: - expr: (key <> '305') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -147,29 +135,25 @@ predicate: expr: ((key <> '306') and (sqrt(key) <> 13)) type: boolean - Filter Operator - predicate: - expr: (key <> '306') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/ppd_join3.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_join3.q.out (working copy) @@ -40,35 +40,27 @@ predicate: expr: (((key <> '11') and (key < '400')) and ((key > '0') and ((value <> 'val_500') or (key > '1')))) type: boolean - Filter Operator - predicate: - expr: (key <> '11') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 < '400') - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 
0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src2:src TableScan alias: src @@ -76,31 +68,27 @@ predicate: expr: ((key <> '12') and (key <> '4')) type: boolean - Filter Operator - predicate: - expr: (key <> '12') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string src3:src TableScan alias: src @@ -108,27 +96,23 @@ predicate: expr: ((key <> '13') and (key <> '1')) type: boolean - Filter Operator - predicate: - expr: (key <> '13') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 - value expressions: - expr: _col0 - type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 + value expressions: + expr: _col0 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/ppd_outer_join1.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_outer_join1.q.out (working copy) @@ -76,7 +76,7 @@ outputColumnNames: _col0, _col1, _col4, _col5 Filter Operator predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) + expr: ((_col4 > 15) and (_col4 < 25)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/ppd_outer_join2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_outer_join2.q.out (working copy) @@ -76,7 +76,7 @@ outputColumnNames: _col0, _col1, _col4, _col5 Filter Operator predicate: - expr: ((((_col0 > '10') and (_col0 < '20')) and (_col4 > '15')) and (_col4 < '25')) + expr: ((_col0 > '10') and (_col0 < '20')) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/ppd_outer_join3.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_outer_join3.q.out (working copy) @@ -72,7 +72,7 @@ outputColumnNames: _col0, _col1, _col4, _col5 Filter Operator predicate: - expr: ((((_col0 > '10') and (_col0 < '20')) and (_col4 > '15')) and (_col4 < '25')) + expr: (((_col4 > '15') and (_col4 < '25')) and ((_col0 > '10') and (_col0 < '20'))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/ppd_outer_join4.q.out 
=================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join4.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_outer_join4.q.out (working copy) @@ -95,7 +95,7 @@ outputColumnNames: _col0, _col1, _col4, _col5, _col8 Filter Operator predicate: - expr: (((((_col0 > '10') and (_col0 < '20')) and (_col4 > '15')) and (_col4 < '25')) and (sqrt(_col8) <> 13)) + expr: ((((_col4 > '15') and (_col4 < '25')) and (sqrt(_col8) <> 13)) and ((_col0 > '10') and (_col0 < '20'))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/ppd_random.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_random.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_random.q.out (working copy) @@ -54,29 +54,25 @@ predicate: expr: (key > '2') type: boolean - Filter Operator - predicate: - expr: (key > '2') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/ppd_transform.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_transform.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_transform.q.out (working copy) @@ -42,39 +42,39 @@ output info: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/ppd_udf_case.q.out 
=================================================================== --- ql/src/test/results/clientpositive/ppd_udf_case.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_udf_case.q.out (working copy) @@ -91,35 +91,31 @@ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7, _col8, _col9 - Filter Operator - predicate: - expr: (((_col2 = '2008-04-08') and (_col8 = '2008-04-08')) and CASE (_col0) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col6 - type: string - expr: _col7 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/ppd_union.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_union.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/ppd_union.q.out (working copy) @@ -34,35 +34,27 @@ predicate: expr: ((key < '100') and ((key > '4') and (value > 'val_4'))) type: boolean - Filter Operator - predicate: - expr: (key < '100') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Filter Operator - predicate: - expr: ((_col0 > '4') and (_col1 > 'val_4')) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:unioned_query-subquery2:src TableScan alias: src @@ -70,35 +62,27 @@ predicate: expr: ((key > '150') and ((key > '4') and (value > 'val_4'))) type: boolean - Filter Operator - predicate: - expr: (key > '150') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Filter 
Operator - predicate: - expr: ((_col0 > '4') and (_col1 > 'val_4')) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/query_result_fileformat.q.out =================================================================== --- ql/src/test/results/clientpositive/query_result_fileformat.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/query_result_fileformat.q.out (working copy) @@ -56,23 +56,19 @@ predicate: expr: (key = 'key1') type: boolean - Filter Operator - predicate: - expr: (key = 'key1') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -136,23 +132,19 @@ predicate: expr: (key = 'key1') type: boolean - Filter Operator - predicate: - expr: (key = 'key1') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/quote1.q.out =================================================================== --- ql/src/test/results/clientpositive/quote1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/quote1.q.out (working copy) @@ -33,32 +33,28 @@ predicate: expr: ((key >= 200) and (key < 300)) type: boolean - Filter Operator - predicate: - expr: ((key >= 200) and (key < 300)) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value - type: 
string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator @@ -122,25 +118,21 @@ predicate: expr: (table = '2008-04-08') type: boolean - Filter Operator - predicate: - expr: (table = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: location - type: int - expr: type - type: string - expr: table - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: location + type: int + expr: type + type: string + expr: table + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy) @@ -143,38 +143,33 @@ predicate: expr: (not ((key > 50) or (key < 10))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds = '2008-04-08') and (not ((key > 50) or (key < 10)))) and (hr like '%2')) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_16-59-31_097_6104330106842070340/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_16-59-31_097_6104330106842070340/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-41-58_245_7085869776801424539/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-41-58_245_7085869776801424539/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] Index: ql/src/test/results/clientpositive/rcfile_null_value.q.out =================================================================== --- ql/src/test/results/clientpositive/rcfile_null_value.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/rcfile_null_value.q.out (working copy) @@ -106,31 +106,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -138,31 +134,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/reduce_deduplicate.q.out =================================================================== --- ql/src/test/results/clientpositive/reduce_deduplicate.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/reduce_deduplicate.q.out (working copy) @@ -258,52 +258,47 @@ predicate: expr: (ds = '2010-03-29') type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2010-03-29') - type: boolean - Select Operator - expressions: - expr: aet - type: string - expr: aes - type: string - outputColumnNames: _col0, _col1 - Transform Operator - command: cat - output info: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3,_col4,_col5,_col6 - columns.types 
string,string,int,string,bigint,string,string - field.delim 9 - serialization.format 9 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: int - expr: _col3 - type: string - expr: _col4 - type: bigint - expr: _col5 - type: string - expr: _col6 - type: string + Select Operator + expressions: + expr: aet + type: string + expr: aes + type: string + outputColumnNames: _col0, _col1 + Transform Operator + command: cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6 + columns.types string,string,int,string,bigint,string,string + field.delim 9 + serialization.format 9 + Reduce Output Operator + key expressions: + expr: _col1 + type: string + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: int + expr: _col3 + type: string + expr: _col4 + type: bigint + expr: _col5 + type: string + expr: _col6 + type: string Needs Tagging: false Reduce Operator Tree: Extract Index: ql/src/test/results/clientpositive/regex_col.q.out =================================================================== --- ql/src/test/results/clientpositive/regex_col.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/regex_col.q.out (working copy) @@ -216,27 +216,23 @@ predicate: expr: (key = 103) type: boolean - Filter Operator - predicate: - expr: (key = 103) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - expr: hr - type: string - expr: ds - type: string - sort order: +++ - Map-reduce partition columns: - expr: key - type: string - expr: hr - type: string - expr: ds - type: string - tag: 0 + Reduce Output Operator + key expressions: + expr: key + type: string + expr: hr + type: string + expr: ds + type: string + sort order: +++ + Map-reduce partition columns: + expr: key + type: string + expr: hr + type: string + expr: ds + type: string + tag: 0 b TableScan alias: b Index: ql/src/test/results/clientpositive/regexp_extract.q.out =================================================================== --- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy) @@ -53,20 +53,25 @@ field.delim 9 serialization.format 9 serialization.last.column.takes.rest true - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + isSamplingPred: false + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [tmap:src] @@ -109,34 +114,29 @@ name: default.src Reduce Operator Tree: Extract - Filter Operator - isSamplingPred: false - predicate: - expr: (_col0 < 100) - type: 
boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: regexp_extract(_col1, 'val_(\d+\t\d+)', 1) - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-01-39_424_6724088978945525837/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-01-39_424_6724088978945525837/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: regexp_extract(_col1, 'val_(\d+\t\d+)', 1) + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-02_223_1393307901149024596/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-02_223_1393307901149024596/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator @@ -302,20 +302,25 @@ field.delim 9 serialization.format 9 serialization.last.column.takes.rest true - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + isSamplingPred: false + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [tmap:src] @@ -358,34 +363,29 @@ name: default.src Reduce Operator Tree: Extract - Filter Operator - isSamplingPred: false - predicate: - expr: (_col0 < 100) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: regexp_extract(_col1, 'val_(\d+\t\d+)') - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-01-43_012_4690012969555367930/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-01-43_012_4690012969555367930/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: regexp_extract(_col1, 'val_(\d+\t\d+)') + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-14_408_4797588639767330030/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-14_408_4797588639767330030/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/router_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy) @@ -293,7 +293,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) + expr: ((_col0 > 10) and (_col0 < 20)) type: boolean Select Operator expressions: @@ -401,25 +401,20 @@ TableScan alias: a GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string b TableScan alias: b @@ -579,7 +574,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col6 > 15)) and (_col6 < 25)) + expr: ((_col0 > 10) and (_col0 < 20)) type: boolean Select Operator expressions: @@ -858,7 +853,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) and (_col6 = '2008-04-08')) + expr: ((_col0 > 10) and (_col0 < 20)) type: boolean Select Operator expressions: @@ -1221,7 +1216,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col6 > 15)) and (_col6 < 25)) and (_col2 = '2008-04-08')) + expr: (((_col0 > 10) and (_col0 < 20)) and (_col2 = '2008-04-08')) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/sample1.q.out =================================================================== --- ql/src/test/results/clientpositive/sample1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample1.q.out (working copy) @@ -39,59 +39,54 @@ predicate: expr: (((hash(rand()) & 2147483647) % 1) = 0) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (hr = '11')) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value + expr: _col2 type: string - expr: ds + expr: _col3 type: string - expr: hr - type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - 
type: int - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-11_295_7558736421237835141/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-11_295_7558736421237835141/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,dt,hr - columns.types int:string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386131 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-48-42_384_1776363036537587311/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-48-42_384_1776363036537587311/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,dt,hr + columns.types int:string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300387722 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [s] Index: ql/src/test/results/clientpositive/sample10.q.out =================================================================== --- ql/src/test/results/clientpositive/sample10.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample10.q.out (working copy) @@ -62,56 +62,46 @@ alias: srcpartbucket GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 0) - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: - expr: ds + Select Operator + expressions: + expr: ds + type: string + outputColumnNames: ds + Group By 
Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: + expr: ds + type: string + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - outputColumnNames: ds - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: ds - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11/000000_0 [srcpartbucket] - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12/000000_0 [srcpartbucket] - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11/000000_0 [srcpartbucket] - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12/000000_0 [srcpartbucket] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11 [srcpartbucket] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12 [srcpartbucket] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11 [srcpartbucket] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12 [srcpartbucket] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11/000000_0 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11 Partition - base file name: 000000_0 + base file name: hr=11 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat partition values: @@ -124,7 +114,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11 name default.srcpartbucket numFiles 16 numPartitions 4 @@ -134,7 +124,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1297386150 + transient_lastDdlTime 1300387785 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -146,7 +136,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket name default.srcpartbucket numFiles 16 numPartitions 4 @@ -156,13 +146,13 @@ serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1297386150 + transient_lastDdlTime 1300387785 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12/000000_0 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12 Partition - base file name: 000000_0 + base file name: hr=12 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat partition values: @@ -175,7 +165,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12 name default.srcpartbucket numFiles 16 numPartitions 4 @@ -185,7 +175,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1297386150 + transient_lastDdlTime 1300387785 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -197,7 +187,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket name default.srcpartbucket numFiles 16 numPartitions 4 @@ -207,13 +197,13 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1297386150 + transient_lastDdlTime 1300387785 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11/000000_0 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11 Partition - base file name: 000000_0 + base file name: hr=11 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat partition values: @@ -226,7 +216,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11 name default.srcpartbucket numFiles 16 numPartitions 4 @@ -236,7 +226,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1297386150 + transient_lastDdlTime 1300387785 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -248,7 +238,7 @@ columns.types string:string file.inputformat 
org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket name default.srcpartbucket numFiles 16 numPartitions 4 @@ -258,13 +248,13 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1297386150 + transient_lastDdlTime 1300387785 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12/000000_0 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12 Partition - base file name: 000000_0 + base file name: hr=12 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat partition values: @@ -277,7 +267,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12 name default.srcpartbucket numFiles 16 numPartitions 4 @@ -287,7 +277,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1297386150 + transient_lastDdlTime 1300387785 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -299,7 +289,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpartbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpartbucket name default.srcpartbucket numFiles 16 numPartitions 4 @@ -309,7 +299,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1297386150 + transient_lastDdlTime 1300387785 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket @@ -333,9 +323,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-02-30_127_2856183230982405624/-ext-10001 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-49-45_265_2712840416519934801/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-02-30_127_2856183230982405624/-ext-10001/ + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-49-45_265_2712840416519934801/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -358,14 +348,14 @@ PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 PREHOOK: Input: 
default@srcpartbucket@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-31_052_5780136652498214851/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-49-45_591_2998519205761890259/-mr-10000 POSTHOOK: query: select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 4 on key) where ds is not null group by ds POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-31_052_5780136652498214851/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-49-45_591_2998519205761890259/-mr-10000 POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -382,14 +372,14 @@ PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-35_963_7688294481780262172/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-50-04_119_484192926926460325/-mr-10000 POSTHOOK: query: select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 2 on key) where ds is not null group by ds POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-35_963_7688294481780262172/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-50-04_119_484192926926460325/-mr-10000 POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -406,14 +396,14 @@ PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-42_219_2445297351151986459/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-50-24_198_2635713830834365407/-mr-10000 POSTHOOK: query: select * from srcpartbucket where ds is not null POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 POSTHOOK: Input: 
default@srcpartbucket@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-42_219_2445297351151986459/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-50-24_198_2635713830834365407/-mr-10000 POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- ql/src/test/results/clientpositive/sample2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -35,55 +35,50 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386165 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name 
default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300387832 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -93,12 +88,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,12 +105,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -127,14 +122,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10002 - destination: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10000 + source: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10002 + destination: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10000 + source: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -144,28 +139,28 @@ 
columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386165 + transient_lastDdlTime 1300387832 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 - tmp directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10001 + tmp directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10000 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -176,12 +171,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386165 + transient_lastDdlTime 1300387832 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 TotalFiles: 1 @@ -189,9 +184,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10002 [pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10002] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10002 [pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10002] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-45_806_6244580131938977166/-ext-10002 + 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-50-32_676_8763136917298824092/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -202,12 +197,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386165 + transient_lastDdlTime 1300387832 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -218,12 +213,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386165 + transient_lastDdlTime 1300387832 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 name: default.dest1 @@ -244,11 +239,11 @@ PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-50_762_1389979276355788996/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-50-45_997_3311359313945248958/-mr-10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-02-50_762_1389979276355788996/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-50-45_997_3311359313945248958/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 474 val_475 Index: ql/src/test/results/clientpositive/sample3.q.out =================================================================== --- ql/src/test/results/clientpositive/sample3.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample3.q.out (working copy) @@ -26,21 +26,17 @@ predicate: expr: (((hash(key) & 2147483647) % 5) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 5) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/sample4.q.out =================================================================== --- ql/src/test/results/clientpositive/sample4.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample4.q.out (working copy) @@ -35,55 +35,50 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386175 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300387860 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -93,12 +88,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,12 +105,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -127,14 +122,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10002 - destination: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10000 + source: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10002 + destination: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10000 + source: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -144,28 +139,28 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386175 + transient_lastDdlTime 1300387860 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 - tmp directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10001 + tmp directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10000 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -176,12 +171,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386175 + transient_lastDdlTime 1300387860 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 TotalFiles: 1 @@ -189,9 +184,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10002 [pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10002] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10002 [pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10002] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-02-55_244_271947740222903486/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-00_046_3788223776105379456/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -202,12 +197,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386175 + transient_lastDdlTime 1300387860 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -218,12 +213,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386175 + transient_lastDdlTime 1300387860 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 name: default.dest1 @@ -244,11 +239,11 @@ PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-00_206_7478347031996949185/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-51-15_196_2679318321689117878/-mr-10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-00_206_7478347031996949185/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-51-15_196_2679318321689117878/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 474 val_475 Index: ql/src/test/results/clientpositive/sample5.q.out =================================================================== --- ql/src/test/results/clientpositive/sample5.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample5.q.out (working copy) @@ -33,48 +33,43 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 5) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 5) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-00_821_4571890067821547485/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-00_821_4571890067821547485/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
transient_lastDdlTime 1297386180 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-16_508_7260905036105084900/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-16_508_7260905036105084900/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300387876 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket [s] Index: ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- ql/src/test/results/clientpositive/sample6.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -33,55 +33,50 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386189 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: 
string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300387903 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -91,12 +86,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -108,12 +103,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -125,14 +120,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10002 - destination: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10000 + source: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10002 + destination: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10000 + source: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -142,28 +137,28 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386189 + transient_lastDdlTime 1300387903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 - tmp directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10001 + tmp directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10000 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -174,12 +169,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386189 + transient_lastDdlTime 1300387903 serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 TotalFiles: 1 @@ -187,9 +182,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10002 [pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10002] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10002 [pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10002] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-09_307_5006808238964170563/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-51-43_123_8728123494397167100/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -200,12 +195,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386189 + transient_lastDdlTime 1300387903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -216,12 +211,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386189 + transient_lastDdlTime 1300387903 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 name: default.dest1 @@ -242,11 +237,11 @@ PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-15_181_1010100869443952030/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-51-58_259_3989028316210994719/-mr-10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-15_181_1010100869443952030/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-51-58_259_3989028316210994719/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 468 val_469 @@ -522,42 +517,37 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 3) type: boolean - Filter Operator 
- isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 3) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket1.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -567,12 +557,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -584,12 +574,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -598,9 +588,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-03-15_460_3544213111156535893/-ext-10001 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-51-58_956_6531334149500922869/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-03-15_460_3544213111156535893/-ext-10001/ + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-51-58_956_6531334149500922869/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -621,12 
+611,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-15_643_6678865763894486337/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-51-59_141_8001945522126655770/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-15_643_6678865763894486337/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-51-59_141_8001945522126655770/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 3 val_4 @@ -892,42 +882,37 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -937,12 +922,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -954,12 +939,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -968,9 +953,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-03-19_090_8431135381992111159/-ext-10001 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-10_969_7256089450359272108/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-03-19_090_8431135381992111159/-ext-10001/ + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-10_969_7256089450359272108/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -991,12 +976,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-19_285_8851945847050704102/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-11_201_2231468642223625886/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-19_285_8851945847050704102/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-11_201_2231468642223625886/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 0 val_0 @@ -1516,40 +1501,35 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 3) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 3) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat @@ -1561,12 +1541,12 @@ columns.types 
int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -1578,12 +1558,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -1592,9 +1572,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-03-22_798_6263776089542221685/-ext-10001 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-23_637_6655077949911941414/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-03-22_798_6263776089542221685/-ext-10001/ + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-23_637_6655077949911941414/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1615,12 +1595,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-22_883_7765831455768854889/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-23_914_4120839381483312169/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-22_883_7765831455768854889/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-23_914_4120839381483312169/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 0 val_0 @@ -1983,40 +1963,35 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 3) = 1) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 3) = 1) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + 
Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat @@ -2028,12 +2003,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2045,12 +2020,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -2059,9 +2034,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-03-26_288_651210894564805365/-ext-10001 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-37_133_3550234962592163511/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-03-26_288_651210894564805365/-ext-10001/ + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-37_133_3550234962592163511/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2082,12 +2057,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-26_383_1024253454726514714/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-37_314_1306569365042124606/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s ORDER 
BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-26_383_1024253454726514714/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-37_314_1306569365042124606/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 1 val_2 @@ -2436,43 +2411,37 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt [s] - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket2 [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket2 Partition - base file name: srcbucket20.txt + base file name: srcbucket2 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -2482,12 +2451,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket2 name default.srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378967 + transient_lastDdlTime 1300369956 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2499,61 +2468,23 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket2 name default.srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378967 + transient_lastDdlTime 
1300369956 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 name: default.srcbucket2 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt - Partition - base file name: srcbucket22.txt - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count 4 - bucket_field_name key - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 - name default.srcbucket2 - serialization.ddl struct srcbucket2 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378967 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count 4 - bucket_field_name key - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 - name default.srcbucket2 - serialization.ddl struct srcbucket2 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378967 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.srcbucket2 - name: default.srcbucket2 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-03-29_785_5940999891168688528/-ext-10001 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-47_667_7354433833883326185/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-03-29_785_5940999891168688528/-ext-10001/ + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-47_667_7354433833883326185/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2574,12 +2505,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket2 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-30_250_6451049486065297443/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-47_844_9024499414925161608/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket2 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-30_250_6451049486065297443/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-47_844_9024499414925161608/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 0 val_0 @@ -2588,26 +2519,62 @@ 2 val_2 4 val_4 8 val_8 
+10 val_10 +12 val_12 +12 val_12 +18 val_18 +18 val_18 20 val_20 24 val_24 24 val_24 26 val_26 26 val_26 28 val_28 +30 val_30 +34 val_34 42 val_42 42 val_42 44 val_44 +54 val_54 +58 val_58 +58 val_58 64 val_64 66 val_66 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +74 val_74 +76 val_76 +76 val_76 +78 val_78 80 val_80 82 val_82 84 val_84 84 val_84 86 val_86 +90 val_90 +90 val_90 +90 val_90 +92 val_92 +96 val_96 +98 val_98 +98 val_98 +100 val_100 +100 val_100 +104 val_104 +104 val_104 114 val_114 116 val_116 118 val_118 118 val_118 +120 val_120 +120 val_120 +126 val_126 +128 val_128 +128 val_128 +128 val_128 134 val_134 134 val_134 136 val_136 @@ -2615,11 +2582,19 @@ 138 val_138 138 val_138 138 val_138 +146 val_146 +146 val_146 150 val_150 152 val_152 152 val_152 156 val_156 158 val_158 +160 val_160 +162 val_162 +164 val_164 +164 val_164 +166 val_166 +168 val_168 170 val_170 172 val_172 172 val_172 @@ -2628,6 +2603,8 @@ 176 val_176 176 val_176 178 val_178 +180 val_180 +186 val_186 190 val_190 192 val_192 194 val_194 @@ -2638,18 +2615,38 @@ 208 val_208 208 val_208 208 val_208 +214 val_214 +216 val_216 +216 val_216 +218 val_218 222 val_222 224 val_224 224 val_224 226 val_226 228 val_228 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +238 val_238 +238 val_238 242 val_242 242 val_242 244 val_244 248 val_248 +252 val_252 +256 val_256 +256 val_256 +258 val_258 260 val_260 262 val_262 266 val_266 +272 val_272 +272 val_272 +274 val_274 +278 val_278 +278 val_278 280 val_280 280 val_280 282 val_282 @@ -2658,6 +2655,14 @@ 286 val_286 288 val_288 288 val_288 +292 val_292 +296 val_296 +298 val_298 +298 val_298 +298 val_298 +302 val_302 +306 val_306 +308 val_308 310 val_310 316 val_316 316 val_316 @@ -2665,12 +2670,34 @@ 318 val_318 318 val_318 318 val_318 +322 val_322 +322 val_322 332 val_332 336 val_336 338 val_338 +342 val_342 +342 val_342 +344 val_344 +344 val_344 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 356 val_356 +360 val_360 +362 val_362 +364 val_364 +366 val_366 +368 val_368 374 val_374 378 val_378 +382 val_382 +382 val_382 +384 val_384 +384 val_384 +384 val_384 +386 val_386 392 val_392 394 val_394 396 val_396 @@ -2684,11 +2711,28 @@ 406 val_406 406 val_406 406 val_406 +414 val_414 +414 val_414 +418 val_418 424 val_424 424 val_424 +430 val_430 +430 val_430 +430 val_430 +432 val_432 +436 val_436 +438 val_438 +438 val_438 +438 val_438 444 val_444 446 val_446 448 val_448 +452 val_452 +454 val_454 +454 val_454 +454 val_454 +458 val_458 +458 val_458 460 val_460 462 val_462 462 val_462 @@ -2699,11 +2743,23 @@ 468 val_468 468 val_468 468 val_468 +470 val_470 +472 val_472 +478 val_478 +478 val_478 480 val_480 480 val_480 480 val_480 482 val_482 484 val_484 +490 val_490 +492 val_492 +492 val_492 +494 val_494 +496 val_496 +498 val_498 +498 val_498 +498 val_498 PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s ORDER BY key, value PREHOOK: type: QUERY @@ -2728,42 +2784,37 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 1) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 1) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: 
string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt [s] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket2 [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket2 Partition - base file name: srcbucket21.txt + base file name: srcbucket2 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -2773,12 +2824,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket2 name default.srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378967 + transient_lastDdlTime 1300369956 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2790,12 +2841,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket2 name default.srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378967 + transient_lastDdlTime 1300369956 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 name: default.srcbucket2 @@ -2804,9 +2855,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-03-34_039_7016077119926969812/-ext-10001 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-59_447_7733746943685994913/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-03-34_039_7016077119926969812/-ext-10001/ + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-59_447_7733746943685994913/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2827,34 +2878,115 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket2 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-34_309_2607550918107351258/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-59_628_1815380591601393438/-mr-10000 POSTHOOK: query: 
SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket2 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-34_309_2607550918107351258/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-52-59_628_1815380591601393438/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 5 val_5 5 val_5 5 val_5 9 val_9 +17 val_17 +33 val_33 +37 val_37 +37 val_37 41 val_41 +53 val_53 +57 val_57 +65 val_65 +69 val_69 +77 val_77 85 val_85 +97 val_97 +97 val_97 +105 val_105 +113 val_113 +113 val_113 +125 val_125 +125 val_125 +129 val_129 +129 val_129 133 val_133 137 val_137 137 val_137 +145 val_145 +149 val_149 +149 val_149 +153 val_153 +157 val_157 +165 val_165 +165 val_165 +169 val_169 +169 val_169 +169 val_169 +169 val_169 177 val_177 +181 val_181 +189 val_189 +193 val_193 +193 val_193 +193 val_193 +197 val_197 +197 val_197 +201 val_201 +205 val_205 +205 val_205 +209 val_209 +209 val_209 +213 val_213 +213 val_213 +217 val_217 +217 val_217 221 val_221 221 val_221 229 val_229 229 val_229 +233 val_233 +233 val_233 +237 val_237 +237 val_237 +241 val_241 +249 val_249 +257 val_257 265 val_265 265 val_265 +273 val_273 +273 val_273 +273 val_273 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +281 val_281 +281 val_281 +285 val_285 +289 val_289 +305 val_305 +309 val_309 +309 val_309 317 val_317 317 val_317 +321 val_321 +321 val_321 +325 val_325 +325 val_325 +333 val_333 +333 val_333 +341 val_341 +345 val_345 353 val_353 353 val_353 +365 val_365 +369 val_369 +369 val_369 +369 val_369 +373 val_373 +377 val_377 +389 val_389 393 val_393 397 val_397 397 val_397 @@ -2866,13 +2998,32 @@ 409 val_409 409 val_409 409 val_409 +413 val_413 +413 val_413 +417 val_417 +417 val_417 +417 val_417 +421 val_421 +429 val_429 +429 val_429 +437 val_437 449 val_449 +453 val_453 +457 val_457 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +477 val_477 481 val_481 485 val_485 489 val_489 489 val_489 489 val_489 489 val_489 +493 val_493 +497 val_497 PREHOOK: query: CREATE TABLE empty_bucket (key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE empty_bucket (key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE @@ -2904,44 +3055,80 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false + Path -> Alias: + 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/empty_bucket [s] + Path -> Partition: + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/empty_bucket + Partition + base file name: empty_bucket + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count 2 + bucket_field_name key + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/empty_bucket + name default.empty_bucket + serialization.ddl struct empty_bucket { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300387991 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count 2 + bucket_field_name key + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/empty_bucket + name default.empty_bucket + serialization.ddl struct empty_bucket { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300387991 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.empty_bucket + name: default.empty_bucket Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-03-37_995_8480253905581863632/-ext-10001 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-53-11_766_8542376763072218418/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-03-37_995_8480253905581863632/-ext-10001/ + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-53-11_766_8542376763072218418/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2962,11 +3149,11 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@empty_bucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-38_079_8763210517724486622/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-53-11_997_4599893472906803818/-mr-10000 POSTHOOK: query: SELECT s.* FROM empty_bucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@empty_bucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-38_079_8763210517724486622/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-53-11_997_4599893472906803818/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/sample7.q.out 
=================================================================== --- ql/src/test/results/clientpositive/sample7.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample7.q.out (working copy) @@ -37,51 +37,46 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((hash(key) & 2147483647) % 4) = 0) and (key > 100)) + expr: (key > 100) type: boolean Filter Operator isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-41_838_7691290019674880228/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-03-41_838_7691290019674880228/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297386221 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-53-23_184_2968728947756644810/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_11-53-23_184_2968728947756644810/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300388003 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] Index: ql/src/test/results/clientpositive/sample8.q.out =================================================================== --- ql/src/test/results/clientpositive/sample8.q.out (revision 1083142) +++ 
ql/src/test/results/clientpositive/sample8.q.out (working copy) @@ -33,49 +33,39 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 1) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 1) = 0) - type: boolean - Reduce Output Operator - sort order: - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string + Reduce Output Operator + sort order: + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string t TableScan alias: t GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 10) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 10) = 0) - type: boolean - Reduce Output Operator - sort order: - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + sort order: + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [t, s] Index: ql/src/test/results/clientpositive/sample9.q.out =================================================================== --- ql/src/test/results/clientpositive/sample9.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/sample9.q.out (working copy) @@ -22,52 +22,47 @@ alias: a GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: _col0 type: int - expr: value + expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-03-59_702_2188871527602970364/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-03-59_702_2188871527602970364/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types int:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-54-06_478_7795184965265906821/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-54-06_478_7795184965265906821/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types int:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + 
MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s:a] + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket [s:a] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -77,12 +72,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -94,12 +89,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297378966 + transient_lastDdlTime 1300369954 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -113,12 +108,12 @@ FROM (SELECT a.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) a) s PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-59_885_5868752797870290038/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-54-06_651_1573765412929782261/-mr-10000 POSTHOOK: query: SELECT s.* FROM (SELECT a.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) a) s POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_17-03-59_885_5868752797870290038/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-54-06_651_1573765412929782261/-mr-10000 474 val_475 62 val_63 468 val_469 Index: ql/src/test/results/clientpositive/semijoin.q.out =================================================================== --- ql/src/test/results/clientpositive/semijoin.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/semijoin.q.out (working copy) @@ -517,33 +517,29 @@ predicate: expr: (key < '15') type: boolean - Filter Operator - predicate: - expr: (key < '15') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: int + outputColumnNames: key + Group By Operator + bucketGroup: false + keys: expr: key type: int - outputColumnNames: 
key - Group By Operator - bucketGroup: false - keys: - expr: key + expr: key + type: int + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 type: int - expr: key + sort order: + + Map-reduce partition columns: + expr: _col1 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: int - sort order: + - Map-reduce partition columns: - expr: _col1 - type: int - tag: 1 + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -653,35 +649,31 @@ predicate: expr: (value < 'val_10') type: boolean - Filter Operator - predicate: - expr: (value < 'val_10') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: key, value + Group By Operator + bucketGroup: false + keys: expr: key type: int expr: value type: string - outputColumnNames: key, value - Group By Operator - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -787,36 +779,32 @@ predicate: expr: (key > 5) type: boolean - Filter Operator - predicate: - expr: (key > 5) - type: boolean + Select Operator + expressions: + expr: key + type: int + outputColumnNames: _col0 Select Operator expressions: - expr: key + expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col0 type: int + mode: hash outputColumnNames: _col0 - Group By Operator - bucketGroup: false - keys: + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -916,46 +904,38 @@ predicate: expr: ((key > 5) and (value <= 'val_20')) type: boolean - Filter Operator - predicate: - expr: (key > 5) - type: boolean + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: _col0 type: int - expr: value + expr: _col1 type: string outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col1 <= 'val_20') - type: boolean - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: + expr: _col0 + type: int + expr: _col1 + type: string + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - Group By Operator - bucketGroup: false - keys: - expr: _col0 - type: int - expr: _col1 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + 
expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -1054,36 +1034,32 @@ predicate: expr: (key > 2) type: boolean - Filter Operator - predicate: - expr: (key > 2) - type: boolean + Select Operator + expressions: + expr: key + type: int + outputColumnNames: _col0 Select Operator expressions: - expr: key + expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col0 type: int + mode: hash outputColumnNames: _col0 - Group By Operator - bucketGroup: false - keys: + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/set_processor_namespaces.q.out =================================================================== --- ql/src/test/results/clientpositive/set_processor_namespaces.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/set_processor_namespaces.q.out (working copy) @@ -25,23 +25,19 @@ predicate: expr: (key = 5) type: boolean - Filter Operator - predicate: - expr: (key = 5) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/skewjoin.q.out =================================================================== --- ql/src/test/results/clientpositive/skewjoin.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/skewjoin.q.out (working copy) @@ -1194,22 +1194,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src @@ -1244,19 +1240,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/smb_mapjoin9.q.out 
=================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin9.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/smb_mapjoin9.q.out (working copy) @@ -88,27 +88,23 @@ expr: _col7 type: string outputColumnNames: _col0, _col2, _col5, _col6, _col7 - Filter Operator - predicate: - expr: (((_col2 = '2010-10-15') and (_col7 = '2010-10-15')) and _col5 is not null) - type: boolean - Select Operator - expressions: - expr: _col5 - type: int - expr: _col6 - type: string - expr: _col7 - type: string - expr: _col0 - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col5 + type: int + expr: _col6 + type: string + expr: _col7 + type: string + expr: _col0 + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/smb_mapjoin_6.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (working copy) @@ -2634,29 +2634,25 @@ expr: _col5 type: string outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: (_col0 > 1000) - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col4 + type: int + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-0 Move Operator @@ -2790,29 +2786,25 @@ expr: _col5 type: string outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: (_col0 > 1000) - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col4 + type: int + expr: _col5 + type: string + outputColumnNames: _col0, _col1, 
_col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-0 Move Operator @@ -2958,31 +2950,27 @@ expr: _col9 type: string outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Filter Operator - predicate: - expr: (_col0 > 1000) - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col4 + type: int + expr: _col5 + type: string + expr: _col8 + type: int + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/stats11.q.out =================================================================== --- ql/src/test/results/clientpositive/stats11.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/stats11.q.out (working copy) @@ -152,46 +152,41 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-20-07_364_154513288032551963/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-20-07_364_154513288032551963/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297387207 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + 
File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-03-19_01-43-30_230_1699180875056648975/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-03-19_01-43-30_230_1699180875056648975/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300524210 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -583,50 +578,45 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-20-42_595_5942527123247023220/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-20-42_595_5942527123247023220/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 464 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 - transient_lastDdlTime 1297387230 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-03-19_01-43-55_136_6630874427564316631/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: 
pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-03-19_01-43-55_136_6630874427564316631/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 464 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 8983 + transient_lastDdlTime 1300524226 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/stats2.q.out =================================================================== --- ql/src/test/results/clientpositive/stats2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/stats2.q.out (working copy) @@ -23,29 +23,25 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.analyze_t1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.analyze_t1 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/subq.q.out =================================================================== --- ql/src/test/results/clientpositive/subq.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/subq.q.out (working copy) @@ -31,30 +31,26 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: _col0 type: string - expr: value + expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 1 + 
table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-4 Conditional Operator Index: ql/src/test/results/clientpositive/subq2.q.out =================================================================== --- ql/src/test/results/clientpositive/subq2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/subq2.q.out (working copy) @@ -69,23 +69,19 @@ expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 >= 90) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy) @@ -50,22 +50,27 @@ columns.types string,string,string field.delim 9 serialization.format 9 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string + Filter Operator + isSamplingPred: false + predicate: + expr: ((_col1 < 100) and (_col0 = '2008-04-08')) + type: boolean + Reduce Output Operator + key expressions: + expr: _col1 + type: string + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [tmap:src] @@ -239,34 +244,29 @@ name: default.srcpart Reduce Operator Tree: Extract - Filter Operator - isSamplingPred: false - predicate: - expr: ((_col1 < 100) and (_col0 = '2008-04-08')) - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-24-54_583_8987862490121410836/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-24-54_583_8987862490121410836/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: 
file:/tmp/amarsri/hive_2011-03-19_01-46-48_559_8291501891794719709/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-03-19_01-46-48_559_8291501891794719709/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/transform_ppr2.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/transform_ppr2.q.out (working copy) @@ -33,30 +33,30 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: ds - type: string - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1, _col2 - Transform Operator - command: /bin/cat - output info: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types string,string,string - field.delim 9 - serialization.format 9 + Select Operator + expressions: + expr: ds + type: string + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1, _col2 + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2 + columns.types string,string,string + field.delim 9 + serialization.format 9 + Filter Operator + isSamplingPred: false + predicate: + expr: (_col1 < 100) + type: boolean Reduce Output Operator key expressions: expr: _col1 @@ -162,34 +162,29 @@ name: default.srcpart Reduce Operator Tree: Extract - Filter Operator - isSamplingPred: false - predicate: - expr: (_col1 < 100) - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-24-59_315_262991000601799738/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-24-59_315_262991000601799738/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/tmp/amarsri/hive_2011-03-19_01-46-51_820_4418676794321891468/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-03-19_01-46-51_820_4418676794321891468/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types string:string + 
serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf1.q.out =================================================================== --- ql/src/test/results/clientpositive/udf1.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf1.q.out (working copy) @@ -51,61 +51,57 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: ('a' like '%a%') - type: boolean - expr: ('b' like '%a%') - type: boolean - expr: ('ab' like '%a%') - type: boolean - expr: ('ab' like '%a_') - type: boolean - expr: ('%_' like '\%\_') - type: boolean - expr: ('ab' like '\%\_') - type: boolean - expr: ('ab' like '_a%') - type: boolean - expr: ('ab' like 'a') - type: boolean - expr: ('' rlike '.*') - type: boolean - expr: ('a' rlike '[ab]') - type: boolean - expr: ('' rlike '[ab]') - type: boolean - expr: ('hadoop' rlike '[a-z]*') - type: boolean - expr: ('hadoop' rlike 'o*') - type: boolean - expr: regexp_replace('abc', 'b', 'c') - type: string - expr: regexp_replace('abc', 'z', 'a') - type: string - expr: regexp_replace('abbbb', 'bb', 'b') - type: string - expr: regexp_replace('hadoop', '(.)[a-z]*', '$1ive') - type: string - expr: regexp_replace('hadoopAAA', 'A.*', '') - type: string - expr: regexp_replace('abc', '', 'A') - type: string - expr: ('abc' rlike '') - type: boolean - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: ('a' like '%a%') + type: boolean + expr: ('b' like '%a%') + type: boolean + expr: ('ab' like '%a%') + type: boolean + expr: ('ab' like '%a_') + type: boolean + expr: ('%_' like '\%\_') + type: boolean + expr: ('ab' like '\%\_') + type: boolean + expr: ('ab' like '_a%') + type: boolean + expr: ('ab' like 'a') + type: boolean + expr: ('' rlike '.*') + type: boolean + expr: ('a' rlike '[ab]') + type: boolean + expr: ('' rlike '[ab]') + type: boolean + expr: ('hadoop' rlike '[a-z]*') + type: boolean + expr: ('hadoop' rlike 'o*') + type: boolean + expr: regexp_replace('abc', 'b', 'c') + type: string + expr: regexp_replace('abc', 'z', 'a') + type: string + expr: regexp_replace('abbbb', 'bb', 'b') + type: string + expr: regexp_replace('hadoop', '(.)[a-z]*', '$1ive') + type: string + expr: regexp_replace('hadoopAAA', 'A.*', '') + type: string + expr: regexp_replace('abc', '', 'A') + type: string + expr: ('abc' rlike '') + type: boolean + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/udf9.q.out =================================================================== --- 
ql/src/test/results/clientpositive/udf9.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf9.q.out (working copy) @@ -38,51 +38,47 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: datediff('2008-12-31', '2009-01-01') - type: int - expr: datediff('2008-03-01', '2008-02-28') - type: int - expr: datediff('2007-03-01', '2007-01-28') - type: int - expr: datediff('2008-03-01 23:59:59', '2008-03-02 00:00:00') - type: int - expr: date_add('2008-12-31', 1) - type: string - expr: date_add('2008-12-31', 365) - type: string - expr: date_add('2008-02-28', 2) - type: string - expr: date_add('2009-02-28', 2) - type: string - expr: date_add('2007-02-28', 365) - type: string - expr: date_add('2007-02-28 23:59:59', 730) - type: string - expr: date_sub('2009-01-01', 1) - type: string - expr: date_sub('2009-01-01', 365) - type: string - expr: date_sub('2008-02-28', 2) - type: string - expr: date_sub('2009-02-28', 2) - type: string - expr: date_sub('2007-02-28', 365) - type: string - expr: date_sub('2007-02-28 01:12:34', 730) - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: datediff('2008-12-31', '2009-01-01') + type: int + expr: datediff('2008-03-01', '2008-02-28') + type: int + expr: datediff('2007-03-01', '2007-01-28') + type: int + expr: datediff('2008-03-01 23:59:59', '2008-03-02 00:00:00') + type: int + expr: date_add('2008-12-31', 1) + type: string + expr: date_add('2008-12-31', 365) + type: string + expr: date_add('2008-02-28', 2) + type: string + expr: date_add('2009-02-28', 2) + type: string + expr: date_add('2007-02-28', 365) + type: string + expr: date_add('2007-02-28 23:59:59', 730) + type: string + expr: date_sub('2009-01-01', 1) + type: string + expr: date_sub('2009-01-01', 365) + type: string + expr: date_sub('2008-02-28', 2) + type: string + expr: date_sub('2009-02-28', 2) + type: string + expr: date_sub('2007-02-28', 365) + type: string + expr: date_sub('2007-02-28 01:12:34', 730) + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_10_trims.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_10_trims.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_10_trims.q.out (working copy) @@ -37,23 +37,19 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: trim(trim(trim(trim(trim(trim(trim(trim(trim(trim(' abc ')))))))))) - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: trim(trim(trim(trim(trim(trim(trim(trim(trim(trim(' abc ')))))))))) + type: string + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/udf_hour.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_hour.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_hour.q.out (working copy) @@ -40,25 +40,21 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: hour('2009-08-07 13:14:15') - type: int - expr: hour('13:14:15') - type: int - expr: hour('2009-08-07') - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: hour('2009-08-07 13:14:15') + type: int + expr: hour('13:14:15') + type: int + expr: hour('2009-08-07') + type: int + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (working copy) @@ -50,26 +50,22 @@ predicate: expr: true is not null type: boolean - Filter Operator - predicate: - expr: true is not null - type: boolean - Select Operator - expressions: - expr: null is null - type: boolean - expr: 1 is not null - type: boolean - expr: 'my string' is not null - type: boolean - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: null is null + type: boolean + expr: 1 is not null + type: boolean + expr: 'my string' is not null + type: boolean + outputColumnNames: _col0, _col1, _col2 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -127,26 +123,22 @@ predicate: expr: (lint is not null and (not mstringstring is null)) type: boolean - Filter Operator - predicate: - expr: (lint is not null and (not mstringstring is null)) - type: boolean - Select Operator - expressions: - expr: lint is not null - type: boolean - expr: lintstring is not null - type: boolean - expr: mstringstring is not null - type: boolean - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - 
GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: lint is not null + type: boolean + expr: lintstring is not null + type: boolean + expr: mstringstring is not null + type: boolean + outputColumnNames: _col0, _col1, _col2 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_like.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_like.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_like.q.out (working copy) @@ -41,43 +41,39 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: ('_%_' like '%\_\%\_%') - type: boolean - expr: ('__' like '%\_\%\_%') - type: boolean - expr: ('%%_%_' like '%\_\%\_%') - type: boolean - expr: ('%_%_%' like '%\%\_\%') - type: boolean - expr: ('_%_' like '\%\_%') - type: boolean - expr: ('%__' like '__\%%') - type: boolean - expr: ('_%' like '\_\%\_\%%') - type: boolean - expr: ('_%' like '\_\%_%') - type: boolean - expr: ('%_' like '\%\_') - type: boolean - expr: ('ab' like '\%\_') - type: boolean - expr: ('ab' like '_a%') - type: boolean - expr: ('ab' like 'a') - type: boolean - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: ('_%_' like '%\_\%\_%') + type: boolean + expr: ('__' like '%\_\%\_%') + type: boolean + expr: ('%%_%_' like '%\_\%\_%') + type: boolean + expr: ('%_%_%' like '%\%\_\%') + type: boolean + expr: ('_%_' like '\%\_%') + type: boolean + expr: ('%__' like '__\%%') + type: boolean + expr: ('_%' like '\_\%\_\%%') + type: boolean + expr: ('_%' like '\_\%_%') + type: boolean + expr: ('%_' like '\%\_') + type: boolean + expr: ('ab' like '\%\_') + type: boolean + expr: ('ab' like '_a%') + type: boolean + expr: ('ab' like 'a') + type: boolean + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_lower.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_lower.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_lower.q.out (working copy) @@ -36,23 +36,19 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: lower('AbC 123') - type: string - expr: upper('AbC 123') - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: lower('AbC 123') + type: string + expr: upper('AbC 123') + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_minute.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_minute.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_minute.q.out (working copy) @@ -40,25 +40,21 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: minute('2009-08-07 13:14:15') - type: int - expr: minute('13:14:15') - type: int - expr: minute('2009-08-07') - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: minute('2009-08-07 13:14:15') + type: int + expr: minute('13:14:15') + type: int + expr: minute('2009-08-07') + type: int + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_notequal.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_notequal.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_notequal.q.out (working copy) @@ -48,23 +48,19 @@ predicate: expr: (key <> '302') type: boolean - Filter Operator - predicate: - expr: (key <> '302') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -610,23 +606,19 @@ predicate: expr: (key <> '302') type: boolean - Filter Operator - predicate: - expr: (key <> '302') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_parse_url.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_parse_url.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_parse_url.q.out (working copy) @@ -63,41 +63,37 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PATH') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'REF') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k2') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k3') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'FILE') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PROTOCOL') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'USERINFO') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'AUTHORITY') - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PATH') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'REF') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k2') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k3') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'FILE') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PROTOCOL') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'USERINFO') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'AUTHORITY') + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_second.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_second.q.out (revision 1083142) +++ 
ql/src/test/results/clientpositive/udf_second.q.out (working copy) @@ -40,25 +40,21 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: second('2009-08-07 13:14:15') - type: int - expr: second('13:14:15') - type: int - expr: second('2009-08-07') - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: second('2009-08-07 13:14:15') + type: int + expr: second('13:14:15') + type: int + expr: second('2009-08-07') + type: int + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_size.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_size.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/udf_size.q.out (working copy) @@ -44,28 +44,24 @@ predicate: expr: (lint is not null and (not mstringstring is null)) type: boolean - Filter Operator - predicate: - expr: (lint is not null and (not mstringstring is null)) - type: boolean - Select Operator - expressions: - expr: size(lint) - type: int - expr: size(lintstring) - type: int - expr: size(mstringstring) - type: int - expr: size(null) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: size(lint) + type: int + expr: size(lintstring) + type: int + expr: size(mstringstring) + type: int + expr: size(null) + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/union.q.out =================================================================== --- ql/src/test/results/clientpositive/union.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/union.q.out (working copy) @@ -39,31 +39,27 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: 
false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:unioninput-subquery2:src TableScan alias: src @@ -71,31 +67,27 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-4 Conditional Operator Index: ql/src/test/results/clientpositive/union20.q.out =================================================================== --- ql/src/test/results/clientpositive/union20.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/union20.q.out (working copy) @@ -164,23 +164,19 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-4 Map Reduce @@ -192,23 +188,19 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-5 Map Reduce Index: ql/src/test/results/clientpositive/union22.q.out =================================================================== --- ql/src/test/results/clientpositive/union22.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/union22.q.out (working copy) @@ -105,32 +105,27 @@ predicate: expr: (k0 > 50) type: boolean - 
Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '1') and (k0 > 50)) - type: boolean - Select Operator - expressions: - expr: k1 - type: string - expr: k3 - type: string - expr: k4 - type: string - outputColumnNames: _col1, _col3, _col4 - HashTable Sink Operator - condition expressions: - 0 {k1} {k2} - 1 {_col3} {_col4} - filter predicates: - 0 {(ds = '1')} - 1 - handleSkewJoin: false - keys: - 0 [Column[k1]] - 1 [Column[_col1]] - Position of Big Table: 0 + Select Operator + expressions: + expr: k1 + type: string + expr: k3 + type: string + expr: k4 + type: string + outputColumnNames: _col1, _col3, _col4 + HashTable Sink Operator + condition expressions: + 0 {k1} {k2} + 1 {_col3} {_col4} + filter predicates: + 0 {(ds = '1')} + 1 + handleSkewJoin: false + keys: + 0 [Column[k1]] + 1 [Column[_col1]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -244,20 +239,79 @@ expr: _col11 type: string outputColumnNames: _col0, _col1, _col10, _col11 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col10 + type: string + expr: _col11 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-03-19_02-06-25_381_4233984888026726131/-ext-10000 + NumFilesPerFileSink: 1 + Static Partition Specification: ds=2/ + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-03-19_02-06-25_381_4233984888026726131/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns k1,k2,k3,k4 + columns.types string:string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dst_union22 + name default.dst_union22 + numFiles 1 + numPartitions 1 + numRows 500 + partition_columns ds + serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11624 + transient_lastDdlTime 1300525581 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dst_union22 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false + null-subquery1:subq-subquery1:dst_union22_delta + TableScan + alias: dst_union22_delta + GatherStats: false Filter Operator isSamplingPred: false predicate: - expr: (_col0 > 20) + expr: (k0 <= 50) type: boolean Select Operator expressions: - expr: _col0 + expr: k1 type: string - expr: _col1 + expr: k2 type: string - expr: _col10 + expr: k3 type: string - expr: _col11 + expr: k4 type: string outputColumnNames: _col0, _col1, _col2, _col3 Union @@ -304,75 +358,6 @@ TotalFiles: 1 GatherStats: true MultiFileSpray: false - null-subquery1:subq-subquery1:dst_union22_delta - TableScan - alias: dst_union22_delta - GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (k0 <= 50) - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '1') and (k0 <= 50)) - type: boolean 
- Select Operator - expressions: - expr: k1 - type: string - expr: k2 - type: string - expr: k3 - type: string - expr: k4 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-51-32_666_7864685352612730230/-ext-10000 - NumFilesPerFileSink: 1 - Static Partition Specification: ds=2/ - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-51-32_666_7864685352612730230/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns k1,k2,k3,k4 - columns.types string:string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dst_union22 - name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 - partition_columns ds - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 - transient_lastDdlTime 1297389087 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dst_union22 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false Needs Tagging: false Path -> Alias: file:/tmp/sdong/hive_2011-02-10_17-51-32_666_7864685352612730230/-mr-10002 [file:/tmp/sdong/hive_2011-02-10_17-51-32_666_7864685352612730230/-mr-10002] @@ -478,6 +463,7 @@ Stats-Aggr Operator Stats Aggregation Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_17-51-32_666_7864685352612730230/-ext-10000/ + PREHOOK: query: insert overwrite table dst_union22 partition (ds='2') select * from ( Index: ql/src/test/results/clientpositive/union_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/union_ppr.q.out (revision 1083142) +++ ql/src/test/results/clientpositive/union_ppr.q.out (working copy) @@ -36,60 +36,50 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Filter Operator - isSamplingPred: false - predicate: - expr: (_col2 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: ++++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + Select Operator + expressions: + expr: key + type: string + expr: 
value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + sort order: ++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string null-subquery2:a-subquery2:y TableScan alias: y @@ -99,60 +89,50 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Filter Operator - isSamplingPred: false - predicate: - expr: (_col2 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: ++++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + sort order: ++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [null-subquery1:a-subquery1:x, null-subquery2:a-subquery2:y] Index: ql/src/test/results/compiler/plan/case_sensitivity.q.xml =================================================================== --- ql/src/test/results/compiler/plan/case_sensitivity.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/case_sensitivity.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10000/ + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10002 @@ -82,7 +82,7 @@ - 
pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1297928223 + 1300391079 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1297928223 + 1300391079 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10001 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1297928222 + 1300391078 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1297928222 + 1300391078 @@ -622,327 +622,176 @@ - + - - - - - - - - - 1 - - - 
pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-37-03_745_5488933276926673189/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - - - - FS_3 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - - - - - - - lintstring - - - src_thrift - - - - - - - - - myint - - - mystring - - - underscore_int - - - - - - - - - int - - - - - - - - - - - - - - - - - - - - - - - - 0 - - - - - - - - - - - - - - - MYSTRING - - - false - - - - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10002 - - _col0 - - - - - - - lint - - - src_thrift - - - - - - - - - - - - - - - - - 1 - - - - - - - - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-44-40_081_7511684353330494180/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + - - + + + + + + + + + + _col1 + + + + - - - _col0 + + + lintstring - - + + src_thrift + + + + + + + + myint + + + mystring + + + underscore_int + + + + + + + + + int + + + + + + + + + + + + + + + - - - _col1 + + + - - + + 0 + + + + + + + + MYSTRING + + + false + + + + - - - - - - + + _col0 + - - - - - - - lint - - - src_thrift - - - - - - - - - - - - - 0 - - - - + + + lint - - + + src_thrift - + + + + + @@ -952,44 +801,64 @@ - 0 + 1 - + - - - boolean - - + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -1003,28 +872,22 @@ - + - lint + _col0 - - src_thrift - - + - + - lintstring + _col1 - - src_thrift - - + @@ -1095,7 +958,11 @@ - + + + boolean + + @@ -1158,7 +1025,17 @@ - + + + lint + + + src_thrift + + + + + @@ -1178,7 +1055,17 @@ - + + + lintstring + + + src_thrift + + + + + @@ -1300,7 +1187,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift src_thrift @@ -1312,7 +1199,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift src_thrift @@ -1373,11 +1260,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1297928222 + 1300391078 @@ -1439,11 +1326,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1297928222 + 1300391078 Index: ql/src/test/results/compiler/plan/cast1.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/cast1.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -62,11 +62,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928231 + 1300391089 @@ -124,11 +124,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928231 + 1300391089 @@ -152,592 +152,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-02-16_23-37-13_345_3549777718350923110/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-02-16_23-37-13_345_3549777718350923110/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3,_col4,_col5,_col6 - - - serialization.format - 1 - - - columns.types - int:double:double:double:int:boolean:int - - - - - - - 1 - - - - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - - - - FS_3 - - - - - - - - - - - - - - - - _col0 - - - - - - - - int - - - - - - - - - _col1 - - - - - - - - double - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - - - - _col4 - - - - - - - - - - - - - _col5 - - - - - - - - boolean - - - - - - - - - _col6 - - - - - - - - - - - - - - + + + + + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-44-52_631_6393389239265423956/-ext-10001 - - - - - - _col6 - - - - - - - - - - true - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFToInteger - - - UDFToInteger - - - - - - - + + 1 - - _col5 - - - - - - - - - - 1 - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFToBoolean - - - UDFToBoolean - - - - - - - + + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-44-52_631_6393389239265423956/-ext-10001/ - - _col4 - - - - - - - - - - 3 - - - - - - - - - - - - - - 2.0 - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFToInteger - - - UDFToInteger - - - - - - - - - + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - + + org.apache.hadoop.mapred.TextInputFormat - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - _col3 - - - - - - - - - - 3.0 - - + + + + columns + _col0,_col1,_col2,_col3,_col4,_col5,_col6 - - - - - - - 2.0 - - + + serialization.format + 1 - - - - - - true + + columns.types + int:double:double:double:int:boolean:int - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - - - - - _col2 - - - - - - - - - - 3 - - - - - - - - - - 2.0 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - - - - - - + + 1 - - _col1 - - - - - - - - - - 3.0 - - - - - - - - - - 2 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - - - - - - - - - _col0 - - - - - - - - - - 3 - - - - - - - - - - 2 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - _col4 - - - _col5 - - - _col6 - - - - - - 
CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + @@ -747,38 +234,46 @@ - - _c0 - _col0 + + + - + + + int + + - - _c1 - _col1 + + + - + + + double + + - - _c2 - _col2 + + + @@ -786,12 +281,12 @@ - - _c3 - _col3 + + + @@ -799,12 +294,12 @@ - - _c4 - _col4 + + + @@ -812,25 +307,29 @@ - - _c5 - _col5 + + + - + + + boolean + + - - _c6 - _col6 + + + @@ -844,69 +343,392 @@ - - - - + + + + _col6 + - - - key + + + - - src + + true + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFToInteger + + + UDFToInteger + + + + + + + + + + _col5 + + + + + - - - string + + + + 1 + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFToBoolean + + + UDFToBoolean + + + + + + + + + + _col4 + + + + + + + + + + 3 + + + + + + + + + + + + + + 2.0 + + + + + + org.apache.hadoop.hive.ql.udf.UDFToInteger + + + UDFToInteger + + + + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + + + + + + + + + _col3 + + + + + + + 3.0 + + + + + + + + + + 2.0 + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + + + + + + + + + _col2 + + + + + + - 86 + 3 + + + + + + + 2.0 + + + - + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + - + + + _col1 + + + + + + + + + + 3.0 + + + + + + + + + + 2 + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + + + + + + + + + _col0 + + + + + + + + + + 3 + + + + + + + + + + 2 + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + _col3 + + + _col4 + + + _col5 + + + _col6 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -920,18 +742,96 @@ - + + + _c0 + - key + _col0 - - src + + + + + + + + _c1 + + + _col1 + - + + + + + _c2 + + + _col2 + + + + + + + + + + _c3 + + + _col3 + + + + + + + + + + _c4 + + + _col4 + + + + + + + + + + _c5 + + + _col5 + + + + + + + + + + _c6 + + + _col6 + + + + + + @@ -955,7 +855,11 @@ src - + + + string + + @@ -1012,7 +916,17 @@ - + + + key + + + src + + + + + @@ -1121,7 +1035,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -1133,7 +1047,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -1190,11 +1104,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928231 + 1300391089 @@ -1252,11 +1166,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928231 + 1300391089 Index: ql/src/test/results/compiler/plan/input1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input1.q.xml (revision 1083142) +++ 
ql/src/test/results/compiler/plan/input1.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10000/ + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1297928301 + 1300391173 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1297928301 + 1300391173 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10001 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10002 + 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10002 @@ -524,11 +524,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928298 + 1300391170 @@ -586,11 +586,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928298 + 1300391170 @@ -614,260 +614,143 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-38-21_530_2431512267255947938/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - - - - FS_3 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - src - - - - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10002 - - _col0 - - - key - - - src - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-46-13_762_778496384983396349/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + - - - - - - - _col0 - - - src - - - - - - - - - - _col1 - - - src - - - - - - - - - + - - - - - - - - - - key - - - src - - - - - - - - - - - - int - - - - - 100 - - - - + + + + _col1 + + + value - - + + src - - - boolean - - + + + _col0 + + + key + + + src + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -881,9 +764,9 @@ - + - key + _col0 src @@ -894,9 +777,9 @@ - + - value + _col1 src @@ -936,7 +819,11 @@ - + + + int + + 100 @@ -949,7 +836,11 @@ - + + + boolean + + @@ -986,10 +877,30 @@ - + + + key + + + src + + + + + - + + + value + + + src + + + + + @@ -1091,7 +1002,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -1103,7 +1014,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -1160,11 +1071,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928298 + 1300391170 @@ -1222,11 +1133,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928298 
+ 1300391170 Index: ql/src/test/results/compiler/plan/input4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input4.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/input4.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -22,7 +22,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-02_104_6921365113719111876/-ext-10000/ + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-01_031_8617310114678800073/-ext-10000/ @@ -58,7 +58,7 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-02_104_6921365113719111876/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-01_031_8617310114678800073/-ext-10000 @@ -111,11 +111,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1297928341 + 1300391220 @@ -125,7 +125,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-02_104_6921365113719111876/-ext-10001 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-01_031_8617310114678800073/-ext-10001 @@ -196,11 +196,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928339 + 1300391218 @@ -258,11 +258,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928339 + 1300391218 @@ -290,160 +290,274 @@ - - - - - _col1 - - - _col1 - - - - - string + + + + + + + + + _col1 + + + _col1 + + + + + string + + + + + + _col0 + + + _col0 + + + + + + - - - - _col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 + + + + - - + + + + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + reducesinkkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + + + reducesinkkey0 + + + + + + + _col0 + + + _col1 + + + + + + + + + _col0 + + + + + + + + + + -1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - reducesinkkey0 + + + + CNTR_NAME_RS_3_NUM_INPUT_ROWS - - serialization.sort.order - + + + CNTR_NAME_RS_3_NUM_OUTPUT_ROWS - - columns.types - string + + CNTR_NAME_RS_3_TIME_TAKEN + + CNTR_NAME_RS_3_FATAL_ERROR + - - - - 1 - - - -1 - - - - - reducesinkkey0 + + RS_3 - - - - - - _col0 + + + + + + - - _col1 + + + + + + + + tkey + + + _col0 + + + + + + + + + + tvalue + + + _col1 + + + + + + + + + - - - - - - _col0 + + + + + + + + + + + + _col0 + + + + + - - + + + + + + int + + + + + 
100 + + - - - - -1 - - - - - + + - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 + + + + boolean - - columns.types - string,string - - - escape.delim - \ - @@ -453,21 +567,21 @@ - CNTR_NAME_RS_3_NUM_INPUT_ROWS + CNTR_NAME_FIL_8_NUM_INPUT_ROWS - CNTR_NAME_RS_3_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_8_NUM_OUTPUT_ROWS - CNTR_NAME_RS_3_TIME_TAKEN + CNTR_NAME_FIL_8_TIME_TAKEN - CNTR_NAME_RS_3_FATAL_ERROR + CNTR_NAME_FIL_8_FATAL_ERROR - RS_3 + FIL_8 @@ -479,34 +593,7 @@ - - - - - tkey - - - _col0 - - - - - - - - - - tvalue - - - _col1 - - - - - - - + @@ -901,7 +988,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src tmap:src @@ -913,7 +1000,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -970,11 +1057,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928339 + 1300391218 @@ -1032,11 +1119,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928339 + 1300391218 @@ -1054,183 +1141,59 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-02_104_6921365113719111876/-ext-10000 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-02_104_6921365113719111876/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_7_NUM_INPUT_ROWS - - - CNTR_NAME_FS_7_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_7_TIME_TAKEN - - - CNTR_NAME_FS_7_FATAL_ERROR - - - - - FS_7 - - - - - - - - - - - - - - - - key - - - - - - - - - - - - - value - - - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - _col1 - - - tmap - - - - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-01_031_8617310114678800073/-ext-10000 - - _col0 - - - _col0 - - - tmap - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-01_031_8617310114678800073/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_6_NUM_INPUT_ROWS + CNTR_NAME_FS_7_NUM_INPUT_ROWS - CNTR_NAME_SEL_6_NUM_OUTPUT_ROWS + CNTR_NAME_FS_7_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_6_TIME_TAKEN + CNTR_NAME_FS_7_TIME_TAKEN - CNTR_NAME_SEL_6_FATAL_ERROR + CNTR_NAME_FS_7_FATAL_ERROR - SEL_6 + FS_7 - + @@ -1241,10 +1204,10 @@ - _col0 + key - tmap + @@ -1254,10 +1217,10 @@ - _col1 + value - tmap + @@ -1272,73 +1235,80 @@ - - - - - - - - - - _col0 - - - tmap - - - - - - - - - - - - int - - - - - 100 - - - - + + + + _col1 + + + _col1 - - + + tmap - - - boolean - - + + + _col0 + + + _col0 + + + tmap + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_5_NUM_INPUT_ROWS + CNTR_NAME_SEL_6_NUM_INPUT_ROWS - CNTR_NAME_FIL_5_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_6_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_5_TIME_TAKEN + CNTR_NAME_SEL_6_TIME_TAKEN - CNTR_NAME_FIL_5_FATAL_ERROR + CNTR_NAME_SEL_6_FATAL_ERROR - FIL_5 + SEL_6 @@ 
-1352,7 +1322,7 @@ - + _col0 @@ -1365,7 +1335,7 @@ - + _col1 @@ -1433,10 +1403,30 @@ - + + + _col0 + + + tmap + + + + + - + + + _col1 + + + tmap + + + + + Index: ql/src/test/results/compiler/plan/input6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input6.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/input6.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10000/ + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1297928361 + 1300391244 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1297928361 + 1300391244 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10001 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10000 + 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10002 @@ -524,11 +524,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1297928360 + 1300391243 @@ -586,11 +586,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1297928360 + 1300391243 @@ -614,246 +614,143 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-22_384_817909773571329389/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - - - - FS_3 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - src1 - - - - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10002 - - _col0 - - - key - - - src1 - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-47-25_448_5509394104615880553/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + - - - - - - - _col0 - - - src1 - - - - - - - - - - _col1 - - - src1 - - - - - - - - - + - - - - - - - - - - key - - - src1 - - - - - - - + + + + _col1 + + + value - - + + src1 - - - boolean - - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -867,9 +764,9 @@ - + - key + _col0 src1 @@ -880,9 +777,9 @@ - + - value + _col1 src1 @@ -925,7 +822,11 @@ - + + + boolean + + @@ -962,10 +863,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -1067,7 +988,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 src1 @@ -1079,7 +1000,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 src1 @@ -1136,11 +1057,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1297928360 + 1300391243 @@ 
-1198,11 +1119,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1297928360 + 1300391243 Index: ql/src/test/results/compiler/plan/input9.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input9.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/input9.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10000/ + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1297928392 + 1300391280 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1297928392 + 1300391280 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10001 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10002 - 
pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10002 @@ -524,11 +524,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1297928390 + 1300391278 @@ -586,11 +586,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1297928390 + 1300391278 @@ -614,242 +614,137 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-39-52_946_1141540538039035344/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - - - - FS_3 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - key - - - src1 - - - - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10002 - - _col0 - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-48-00_461_5610626175173295380/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + - - - - - - - _col0 - - - - - void - - - - - - - - - _col1 - - - src1 - - - - - - - - - + - - - - - - - - - - - - - - - - - - - - - + + + + _col1 + + + key - - + + src1 - - - boolean - - + + + _col0 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -863,10 +758,24 @@ - + - key + _col0 + + + + void + + + + + + + + + _col1 + src1 @@ -909,7 +818,11 @@ - + + + boolean + + @@ -946,7 +859,17 @@ - + + + key + + + src1 + + + + + @@ -1058,7 +981,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 src1 @@ -1070,7 +993,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 src1 @@ -1127,11 +1050,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 
transient_lastDdlTime - 1297928390 + 1300391278 @@ -1189,11 +1112,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1297928390 + 1300391278 Index: ql/src/test/results/compiler/plan/input_part1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_part1.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/input_part1.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -75,11 +75,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 transient_lastDdlTime - 1297928393 + 1300391281 @@ -141,11 +141,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcpart + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpart transient_lastDdlTime - 1297928393 + 1300391281 @@ -169,279 +169,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-02-16_23-40-03_483_6930762679607486323/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-02-16_23-40-03_483_6930762679607486323/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3 - - - serialization.format - 1 - - - columns.types - string:string:string:string - - - - - - - 1 - - + + + + + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-48-12_940_1509363612769885979/-ext-10001 + + + 1 + + + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-48-12_940_1509363612769885979/-ext-10001/ + + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - + + org.apache.hadoop.mapred.TextInputFormat - - FS_3 + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - + + + + columns + _col0,_col1,_col2,_col3 - - - - - - - - - - _col0 - - - - - - - - string - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - + + serialization.format + 1 + + columns.types + string:string:string:string + - - - - - - _col3 - - - ds - - - true - - - srcpart - - - - - + + 1 - - _col2 - - - hr - - - true - - - srcpart - - - - - - - - _col1 - - - value - - - srcpart - - - - - - - - _col0 - - - key - - - srcpart - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - - - - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + @@ -451,30 +251,28 @@ - - key - _col0 - srcpart + - + + + string + + - - value - _col1 - srcpart + @@ -483,14 +281,11 @@ - - hr - _col2 - srcpart + @@ -499,14 +294,11 @@ - - ds - _col3 - srcpart + @@ -521,181 +313,126 @@ - - - - - - - - - - - - - - - - - - key - - - srcpart - - - - - - - - - - - - int - - - - - 100 - - - - - - - - - - - - boolean - - - - - - - - - - - - - ds - - - true - - - srcpart - - - - - - - - - - - - - 
2008-04-08 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - hr - - - true - - - srcpart - - - - - - - - - - - - - 12 - - - - - - - - - - - - - - + + + + _col3 + + + ds - - + + true + + srcpart + - + + + _col2 + + + hr + + + true + + + srcpart + + + + + + + + _col1 + + + value + + + srcpart + + + + + + + + _col0 + + + key + + + srcpart + + + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + _col3 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -709,10 +446,13 @@ - - + + key + + _col0 + srcpart @@ -722,10 +462,13 @@ - - + + value + + _col1 + srcpart @@ -735,9 +478,12 @@ - + + + hr + - ds + _col2 srcpart @@ -748,9 +494,12 @@ - + + + ds + - hr + _col3 srcpart @@ -790,7 +539,11 @@ - + + + int + + 100 @@ -803,7 +556,11 @@ - + + + boolean + + @@ -840,16 +597,56 @@ - + + + key + + + srcpart + + + + + - + + + value + + + srcpart + + + + + - + + + ds + + + srcpart + + + + + - + + + hr + + + srcpart + + + + + @@ -948,7 +745,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 srcpart @@ -960,7 +757,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 hr=12 @@ -1030,11 +827,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 transient_lastDdlTime - 1297928393 + 1300391281 @@ -1096,11 +893,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcpart + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpart transient_lastDdlTime - 1297928393 + 1300391281 Index: ql/src/test/results/compiler/plan/input_testxpath2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath2.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/input_testxpath2.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -66,11 +66,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1297928433 + 1300391327 @@ -132,11 +132,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1297928433 + 1300391327 @@ -160,328 +160,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-02-16_23-40-34_713_2180558700676691771/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-02-16_23-40-34_713_2180558700676691771/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2 - - - serialization.format - 1 - - - columns.types - int:int:int - - - - - - - 1 - - + + + + + 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-48-49_020_112154668706917126/-ext-10001 + + + 1 + + + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-48-49_020_112154668706917126/-ext-10001/ + + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - + + org.apache.hadoop.mapred.TextInputFormat - - FS_3 + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - + + + + columns + _col0,_col1,_col2 - - - - - - - - - - _col0 - - - - - - - - int - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - + + serialization.format + 1 - - - - - - - - - - _col2 - - - - - - - mstringstring - - - src_thrift - - - - - - - string - - - - - - - - - + + columns.types + int:int:int - - - - - - - - _col1 - - - - - - - lintstring - - - src_thrift - - - - - - - - - myint - - - mystring - - - underscore_int - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + 1 - - _col0 - - - - - - - lint - - - src_thrift - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - - - - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + @@ -491,25 +242,29 @@ - - _c0 - _col0 + + + - + + + int + + - - _c1 - _col1 + + + @@ -517,12 +272,12 @@ - - _c2 - _col2 + + + @@ -536,114 +291,188 @@ - - - - + + + + _col2 + - - - - - - - lint - - - src_thrift - - - - - - - + + + mstringstring - - + + src_thrift - - - boolean + + + + + string + + + + + + + + + + + + + + + + + _col1 + + + - - - - - - + + + lintstring + + + src_thrift + + + + + + - - - mstringstring - - - src_thrift - - - - - + myint + + mystring + + + underscore_int + - - + + + + + + + + + + + + - - - - - + + + + + + + + + + + + + + _col0 + + + + + + + lint + + src_thrift + - + + + + + - + - + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -657,41 +486,41 @@ - + + + _c0 + - lint + _col0 - - src_thrift - - + - + + + _c1 + - lintstring + _col1 - - src_thrift - - + - + + + _c2 + - mstringstring + _col2 - - src_thrift - - + @@ -732,7 +561,11 @@ - + + + boolean + + @@ -846,7 +679,17 @@ - + + + lint + + + src_thrift + + + + + @@ -866,10 +709,30 @@ - + + + lintstring + + + src_thrift + + + + + - + + + mstringstring + + + src_thrift + + + + + @@ -971,7 +834,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift src_thrift @@ -983,7 +846,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift src_thrift @@ -1044,11 +907,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime 
- 1297928433 + 1300391327 @@ -1110,11 +973,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src_thrift + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1297928433 + 1300391327 Index: ql/src/test/results/compiler/plan/join4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join4.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/join4.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -284,249 +284,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_6_NUM_INPUT_ROWS - - - CNTR_NAME_RS_6_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_6_TIME_TAKEN - - - CNTR_NAME_RS_6_FATAL_ERROR - - - - - RS_6 - - - - - - - - - - - - - - - - VALUE._col0 - - - a - - - - - - - - - - VALUE._col1 - - - a - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src1 - - + + + string + + - _col0 - + VALUE._col0 + - key + _col0 - - src1 - @@ -535,18 +320,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -556,31 +390,73 @@ + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_5_NUM_INPUT_ROWS + CNTR_NAME_RS_6_NUM_INPUT_ROWS - CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS + CNTR_NAME_RS_6_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_5_TIME_TAKEN + CNTR_NAME_RS_6_TIME_TAKEN - CNTR_NAME_SEL_5_FATAL_ERROR + CNTR_NAME_RS_6_FATAL_ERROR - SEL_5 + RS_6 - + @@ -591,8 +467,11 @@ - _col0 + VALUE._col0 + + a + @@ -601,8 +480,11 @@ - _col1 + VALUE._col1 + + a + @@ -616,124 +498,80 @@ - - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - int - - - - - 10 - - - - - - - - - - - - boolean - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src1 - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_4_NUM_INPUT_ROWS + CNTR_NAME_SEL_5_NUM_INPUT_ROWS - CNTR_NAME_FIL_4_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_4_TIME_TAKEN + CNTR_NAME_SEL_5_TIME_TAKEN - 
CNTR_NAME_FIL_4_FATAL_ERROR + CNTR_NAME_SEL_5_FATAL_ERROR - FIL_4 + SEL_5 @@ -747,26 +585,20 @@ - + - key + _col0 - - src1 - - + - value + _col1 - - src1 - @@ -806,7 +638,11 @@ - + + + int + + 10 @@ -819,7 +655,11 @@ - + + + boolean + + @@ -903,10 +743,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -1006,252 +866,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 1 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_7_NUM_INPUT_ROWS - - - CNTR_NAME_RS_7_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_7_TIME_TAKEN - - - CNTR_NAME_RS_7_FATAL_ERROR - - - - - RS_7 - - - - - - - - - - - - - - - - VALUE._col0 - - - b - - - - - - - - - - VALUE._col1 - - - b - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src2 - - _col0 - + VALUE._col0 + - key + _col0 - - src2 - @@ -1260,18 +902,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -1281,31 +972,76 @@ + + + + + 1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_RS_7_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_RS_7_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_RS_7_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_RS_7_FATAL_ERROR - SEL_2 + RS_7 - + @@ -1316,8 +1052,11 @@ - _col0 + VALUE._col0 + + b + @@ -1326,8 +1065,11 @@ - _col1 + VALUE._col1 + + b + @@ -1341,121 +1083,85 @@ - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 15 - - - - - - - - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src2 - + + + _col0 + + + key + + + src2 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 - + @@ -1464,26 +1170,20 @@ - + - key + _col0 - - src2 - - + - value + _col1 - - src2 - @@ -1620,10 +1320,30 @@ - + + + key + + + src2 + + + + + - + + + value + + + src2 + + + + + Index: 
ql/src/test/results/compiler/plan/join5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join5.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/join5.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -284,249 +284,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_6_NUM_INPUT_ROWS - - - CNTR_NAME_RS_6_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_6_TIME_TAKEN - - - CNTR_NAME_RS_6_FATAL_ERROR - - - - - RS_6 - - - - - - - - - - - - - - - - VALUE._col0 - - - a - - - - - - - - - - VALUE._col1 - - - a - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src1 - - + + + string + + - _col0 - + VALUE._col0 + - key + _col0 - - src1 - @@ -535,18 +320,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -556,31 +390,73 @@ + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_5_NUM_INPUT_ROWS + CNTR_NAME_RS_6_NUM_INPUT_ROWS - CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS + CNTR_NAME_RS_6_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_5_TIME_TAKEN + CNTR_NAME_RS_6_TIME_TAKEN - CNTR_NAME_SEL_5_FATAL_ERROR + CNTR_NAME_RS_6_FATAL_ERROR - SEL_5 + RS_6 - + @@ -591,8 +467,11 @@ - _col0 + VALUE._col0 + + a + @@ -601,8 +480,11 @@ - _col1 + VALUE._col1 + + a + @@ -616,124 +498,80 @@ - - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - int - - - - - 10 - - - - - - - - - - - - boolean - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src1 - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_4_NUM_INPUT_ROWS + CNTR_NAME_SEL_5_NUM_INPUT_ROWS - CNTR_NAME_FIL_4_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_4_TIME_TAKEN + CNTR_NAME_SEL_5_TIME_TAKEN - CNTR_NAME_FIL_4_FATAL_ERROR + CNTR_NAME_SEL_5_FATAL_ERROR - FIL_4 + SEL_5 @@ -747,26 +585,20 @@ - + - key + _col0 - - src1 - - + - value + _col1 - - src1 - @@ -806,7 +638,11 @@ - + + + int + + 10 @@ -819,7 +655,11 @@ - + + + boolean + + @@ -903,10 +743,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + 
src1 + + + + + @@ -1006,252 +866,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 1 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_7_NUM_INPUT_ROWS - - - CNTR_NAME_RS_7_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_7_TIME_TAKEN - - - CNTR_NAME_RS_7_FATAL_ERROR - - - - - RS_7 - - - - - - - - - - - - - - - - VALUE._col0 - - - b - - - - - - - - - - VALUE._col1 - - - b - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src2 - - _col0 - + VALUE._col0 + - key + _col0 - - src2 - @@ -1260,18 +902,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -1281,31 +972,76 @@ + + + + + 1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_RS_7_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_RS_7_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_RS_7_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_RS_7_FATAL_ERROR - SEL_2 + RS_7 - + @@ -1316,8 +1052,11 @@ - _col0 + VALUE._col0 + + b + @@ -1326,8 +1065,11 @@ - _col1 + VALUE._col1 + + b + @@ -1341,121 +1083,85 @@ - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 15 - - - - - - - - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src2 - + + + _col0 + + + key + + + src2 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 - + @@ -1464,26 +1170,20 @@ - + - key + _col0 - - src2 - - + - value + _col1 - - src2 - @@ -1620,10 +1320,30 @@ - + + + key + + + src2 + + + + + - + + + value + + + src2 + + + + + Index: ql/src/test/results/compiler/plan/join6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join6.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/join6.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -284,249 +284,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - 
- - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_6_NUM_INPUT_ROWS - - - CNTR_NAME_RS_6_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_6_TIME_TAKEN - - - CNTR_NAME_RS_6_FATAL_ERROR - - - - - RS_6 - - - - - - - - - - - - - - - - VALUE._col0 - - - a - - - - - - - - - - VALUE._col1 - - - a - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src1 - - + + + string + + - _col0 - + VALUE._col0 + - key + _col0 - - src1 - @@ -535,18 +320,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -556,31 +390,73 @@ + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_5_NUM_INPUT_ROWS + CNTR_NAME_RS_6_NUM_INPUT_ROWS - CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS + CNTR_NAME_RS_6_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_5_TIME_TAKEN + CNTR_NAME_RS_6_TIME_TAKEN - CNTR_NAME_SEL_5_FATAL_ERROR + CNTR_NAME_RS_6_FATAL_ERROR - SEL_5 + RS_6 - + @@ -591,8 +467,11 @@ - _col0 + VALUE._col0 + + a + @@ -601,8 +480,11 @@ - _col1 + VALUE._col1 + + a + @@ -616,124 +498,80 @@ - - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - int - - - - - 10 - - - - - - - - - - - - boolean - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src1 - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_4_NUM_INPUT_ROWS + CNTR_NAME_SEL_5_NUM_INPUT_ROWS - CNTR_NAME_FIL_4_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_4_TIME_TAKEN + CNTR_NAME_SEL_5_TIME_TAKEN - CNTR_NAME_FIL_4_FATAL_ERROR + CNTR_NAME_SEL_5_FATAL_ERROR - FIL_4 + SEL_5 @@ -747,26 +585,20 @@ - + - key + _col0 - - src1 - - + - value + _col1 - - src1 - @@ -806,7 +638,11 @@ - + + + int + + 10 @@ -819,7 +655,11 @@ - + + + boolean + + @@ -903,10 +743,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -1006,252 +866,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 1 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_7_NUM_INPUT_ROWS - - - CNTR_NAME_RS_7_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_7_TIME_TAKEN - - - CNTR_NAME_RS_7_FATAL_ERROR - - - - - RS_7 - - - - - - - - - - - - - - - - VALUE._col0 - - - b - - - - - - - - - - VALUE._col1 - - - b - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src2 - - _col0 - + VALUE._col0 + - key + _col0 - - src2 - @@ -1260,18 +902,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -1281,31 +972,76 @@ + + + + + 1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_RS_7_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_RS_7_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_RS_7_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_RS_7_FATAL_ERROR - SEL_2 + RS_7 - + @@ -1316,8 +1052,11 @@ - _col0 + VALUE._col0 + + b + @@ -1326,8 +1065,11 @@ - _col1 + VALUE._col1 + + b + @@ -1341,121 +1083,85 @@ - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 15 - - - - - - - - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src2 - + + + _col0 + + + key + + + src2 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 - + @@ -1464,26 +1170,20 @@ - + - key + _col0 - - src2 - - + - value + _col1 - - src2 - @@ -1620,10 +1320,30 @@ - + + + key + + + src2 + + + + + - + + + value + + + src2 + + + + + Index: ql/src/test/results/compiler/plan/join7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join7.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/join7.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -416,249 +416,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns 
  [src1 branch (alias a): the ReduceSinkOperator RS_9 descriptor (BinarySortableSerDe key column joinkey0, sort order +, value columns _col0,_col1 via LazyBinarySerDe) is re-serialized after its column map (VALUE._col0/VALUE._col1 over _col0/_col1); in the old slots SEL_8 -> RS_9 and FIL_7 -> SEL_8, the FilterOperator descriptor over src1.key (constants 10 and 20) is dropped in favour of the SelectOperator descriptor, explicit int and boolean type entries are added where the removed block had defined them, and the key/value ColumnInfo entries for src1 move to the table-scan section]
  [src2 branch (alias b): the same reshuffle around RS_10 (tag 1): SEL_2 -> RS_10 and FIL_1 -> SEL_2, with the FilterOperator over src2.key (constants 15 and 25) dropped and the key/value ColumnInfo entries for src2 moved to the table-scan section]
  [src3 branch (alias c): the same reshuffle around RS_11 (tag 2): SEL_5 -> RS_11 and FIL_4 -> SEL_5, with the FilterOperator over src3.key (constants 20 and 25) dropped and the key/value ColumnInfo entries for src3 moved to the table-scan section]
Index: ql/src/test/results/compiler/plan/join8.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/join8.q.xml (revision 1083142)
+++ ql/src/test/results/compiler/plan/join8.q.xml (working copy)
  [src1 branch (alias a): the ReduceSinkOperator RS_6 descriptor is re-serialized after its column map (VALUE._col0/VALUE._col1 over _col0/_col1); in the old slots SEL_5 -> RS_6 and FIL_4 -> SEL_5, the FilterOperator over src1.key (constants 10 and 20) is dropped, explicit int and boolean type entries are added, the surviving filter is renumbered FIL_13 -> FIL_14, and the key/value ColumnInfo entries for src1 move to the table-scan section]
  [src2 branch (alias b): the same reshuffle around RS_7 (tag 1): SEL_2 -> RS_7 and FIL_1 -> SEL_2, with the FilterOperator over src2.key (constants 15 and 25) dropped, the surviving filter renumbered FIL_14 -> FIL_15, and the key/value ColumnInfo entries for src2 moved to the table-scan section]
  [reduce side: the SelectOperator and FilterOperator descriptors over the join output swap position, so FIL_10 -> SEL_9 and SEL_9 -> FIL_13 in the counter and operator-id slots, and the _col0.._col3 ColumnInfo entries (aliases a and b) move with them]
Index: ql/src/test/results/compiler/plan/sample1.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample1.q.xml (revision 1083142)
+++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy)
  [srcpart partition and table locations change from pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcpart to pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcpart, transient_lastDdlTime changes from 1297928521 to 1300391430, and the temporary output directory changes from file:/tmp/sdong/hive_2011-02-16_23-42-11_286_7194349373643907615/-ext-10001 to file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-50-41_329_5339473553297016927/-ext-10001]
  [the FileSinkOperator FS_4 descriptor (LazySimpleSerDe, TextInputFormat, HiveIgnoreKeyTextOutputFormat, columns _col0,_col1,_col2,_col3 of types string:string:string:string) and the SelectOperator SEL_3 column map (_col0.._col3 from key, value, ds, hr of alias s, the partition columns flagged true) are re-serialized; the FilterOperator descriptor over the partition columns ds and hr (constants '2008-04-08' and 11) is dropped, so FIL_2 -> SEL_3 in the counter and operator-id slots; explicit int and boolean type entries are added around the surviving sampling expression (constants 2147483647, 1 and 0); the key/value/ds/hr ColumnInfo entries move to the table-scan section]
Index: ql/src/test/results/compiler/plan/sample2.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample2.q.xml (revision 1083142)
+++ ql/src/test/results/compiler/plan/sample2.q.xml (working copy)
  [scratch directory changes from pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-42-22_117_3416802970134298370 to pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-50-53_434_1976906476534054319]
  [dest1 location moves to pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 with transient_lastDdlTime 1297928541 -> 1300391452, srcbucket gets transient_lastDdlTime 1297928535 -> 1300391446, and the FileSinkOperator and TableScanOperator are renumbered FS_6 -> FS_5 and TS_5 -> TS_4, counters included]
  [main map-side hunk over srcbucket (alias s): the FileSinkOperator FS_3 descriptor (output directories -ext-10002 and -ext-10000 under the new scratch path) and the SelectOperator SEL_2 column map (_col0 from key as int, _col1 from value) are re-serialized one slot earlier, so SEL_2 -> FS_3, FIL_1 -> SEL_2 and FIL_4 -> FIL_1 in the counter and operator-id slots; the dropped descriptor is one of the two stacked FilterOperators carrying the bucket-sampling predicate built from UDFOPBitAnd (&) over 2147483647, UDFOPMod (%) with 2, and the constant 0; the hidden virtual columns BLOCK__OFFSET__INSIDE__FILE (bigint) and INPUT__FILE__NAME move below the key/value columns]
  [the srcbucket input path and alias change from pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt / srcbucket0.txt to pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket / srcbucket, and the srcbucket table descriptors pick up the new location and transient_lastDdlTime 1297928535 -> 1300391446]
Index: ql/src/test/results/compiler/plan/sample3.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample3.q.xml (revision 1083142)
+++ ql/src/test/results/compiler/plan/sample3.q.xml (working copy)
  [same shape of change as sample2.q.xml: scratch directory hive_2011-02-16_23-42-33_202_3298476252783239902 -> hive_2011-03-17_12-51-05_489_6971381113062016433 under the new workspace, dest1 transient_lastDdlTime 1297928552 -> 1300391465, srcbucket transient_lastDdlTime 1297928546 -> 1300391458, FS_6 -> FS_5 and TS_5 -> TS_4; in the main hunk the FS_3/SEL_2 descriptors shift one slot (SEL_2 -> FS_3, FIL_1 -> SEL_2, FIL_4 -> FIL_1) and the duplicated sampling FilterOperator (UDFOPBitAnd & over 2147483647, UDFOPMod % with 2, constant 0) is dropped; here the input stays the srcbucket directory, only the user path changes]
Index: ql/src/test/results/compiler/plan/sample4.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample4.q.xml (revision 1083142)
+++ ql/src/test/results/compiler/plan/sample4.q.xml (working copy)
  [scratch directory changes from hive_2011-02-16_23-42-44_197_4110427304140600093 to hive_2011-03-17_12-51-17_628_9159475183698542444 under /Users/amarsri/Documents/workspace/hive]
  [dest1 location moves to the new workspace with transient_lastDdlTime 1297928563 -> 1300391477, srcbucket gets transient_lastDdlTime 1297928557 -> 1300391470, and FS_6 -> FS_5, TS_5 -> TS_4; in the main hunk the FS_3/SEL_2 descriptors shift one slot (SEL_2 -> FS_3, FIL_1 -> SEL_2, FIL_4 -> FIL_1), the duplicated sampling FilterOperator (UDFOPBitAnd & over 2147483647, UDFOPMod % with 2, constant 0) is dropped, and the BLOCK__OFFSET__INSIDE__FILE/INPUT__FILE__NAME virtual columns move below key/value]
  [as in sample2.q.xml, the srcbucket input path and alias change from the single bucket file srcbucket0.txt to the srcbucket directory under the new workspace, with transient_lastDdlTime 1297928557 -> 1300391470]
Index: ql/src/test/results/compiler/plan/sample5.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample5.q.xml (revision 1083142)
+++ ql/src/test/results/compiler/plan/sample5.q.xml (working copy)
  [scratch directory changes from hive_2011-02-16_23-42-55_426_5045529589933618268 to hive_2011-03-17_12-51-29_982_5179648393784620836 and the dest1 location moves to the new workspace]
  [dest1 transient_lastDdlTime 1297928574 -> 1300391489, srcbucket transient_lastDdlTime 1297928568 -> 1300391483, FS_6 -> FS_5 and TS_5 -> TS_4; in the main hunk the FS_3/SEL_2 descriptors shift one slot (SEL_2 -> FS_3, FIL_1 -> SEL_2, FIL_4 -> FIL_1) and the duplicated sampling FilterOperator (UDFOPBitAnd & over 2147483647, UDFOPMod % with 5, constant 0) is dropped; the BLOCK__OFFSET__INSIDE__FILE/INPUT__FILE__NAME virtual columns move below key/value and the srcbucket directory location moves to the new workspace]
Index: ql/src/test/results/compiler/plan/sample6.q.xml
===================================================================
--- ql/src/test/results/compiler/plan/sample6.q.xml (revision 1083142)
+++ ql/src/test/results/compiler/plan/sample6.q.xml (working copy)
  [scratch directory changes from hive_2011-02-16_23-43-06_605_6484972772014447395 to hive_2011-03-17_12-51-42_109_3641641327366502669 and the dest1 location moves to the new workspace]
transient_lastDdlTime - 1297928586 + 1300391501 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_6_NUM_INPUT_ROWS + CNTR_NAME_FS_5_NUM_INPUT_ROWS - CNTR_NAME_FS_6_NUM_OUTPUT_ROWS + CNTR_NAME_FS_5_NUM_OUTPUT_ROWS - CNTR_NAME_FS_6_TIME_TAKEN + CNTR_NAME_FS_5_TIME_TAKEN - CNTR_NAME_FS_6_FATAL_ERROR + CNTR_NAME_FS_5_FATAL_ERROR - FS_6 + FS_5 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_5_NUM_INPUT_ROWS + CNTR_NAME_TS_4_NUM_INPUT_ROWS - CNTR_NAME_TS_5_NUM_OUTPUT_ROWS + CNTR_NAME_TS_4_NUM_OUTPUT_ROWS - CNTR_NAME_TS_5_TIME_TAKEN + CNTR_NAME_TS_4_TIME_TAKEN - CNTR_NAME_TS_5_FATAL_ERROR + CNTR_NAME_TS_4_FATAL_ERROR - TS_5 + TS_4 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1297928586 + 1300391501 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10001 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1297928580 + 1300391495 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1297928580 + 1300391495 @@ -622,351 +622,150 
@@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-06_605_6484972772014447395/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - - - - FS_3 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10002 - - _col0 - - - key - - - s - - - - - int - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-42_109_3641641327366502669/-ext-10000/ - - true + + + + 1 + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - - - + - - - - true + + + + _col1 + + + value + + + s + + + + + - - - - - - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - - - - - - - - - - - - - - 2147483647 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - & - - - - - - - - - - - - - - - 4 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - % - - - - - - - - - - - - - - - 0 - - - - + + _col0 + + + key - - + + s - + - boolean + int - - + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -980,9 +779,9 @@ - + - key + _col0 s @@ -993,9 +792,9 @@ - + - value + _col1 s @@ -1015,6 +814,9 @@ + + true + @@ -1130,30 +932,37 @@ - + + + boolean + + + + + - CNTR_NAME_FIL_4_NUM_INPUT_ROWS + CNTR_NAME_FIL_1_NUM_INPUT_ROWS - CNTR_NAME_FIL_4_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_4_TIME_TAKEN + CNTR_NAME_FIL_1_TIME_TAKEN - CNTR_NAME_FIL_4_FATAL_ERROR + CNTR_NAME_FIL_1_FATAL_ERROR - FIL_4 + FIL_1 @@ -1165,40 +974,24 @@ - + - - - - - - - - - true - + - BLOCK__OFFSET__INSIDE__FILE + key s - - - bigint - - + - - - true - + - INPUT__FILE__NAME + value s @@ -1258,7 +1051,50 @@ - + + + + + + + + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + s + + + + + bigint + + + + + + + + + true + + + INPUT__FILE__NAME + + + s + + + + + + + @@ -1272,7 +1108,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket s @@ -1284,10 +1120,10 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket - srcbucket0.txt + srcbucket org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1345,11 +1181,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket + 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1297928580 + 1300391495 @@ -1411,11 +1247,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1297928580 + 1300391495 Index: ql/src/test/results/compiler/plan/sample7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample7.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/sample7.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10000/ + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1297928597 + 1300391513 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/dest1 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1297928597 + 1300391513 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10001 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10002 + 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10000 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10002 + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1297928591 + 1300391507 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1297928591 + 1300391507 @@ -626,238 +626,98 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-17_842_5108524333287494418/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_4_NUM_INPUT_ROWS - - - CNTR_NAME_FS_4_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_4_TIME_TAKEN - - - CNTR_NAME_FS_4_FATAL_ERROR - - - - - FS_4 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10002 - - _col0 - - - key - - - s - - - - - int - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-51-54_373_422688100743061996/-ext-10000/ - - true + + + + 1 + - CNTR_NAME_SEL_3_NUM_INPUT_ROWS + CNTR_NAME_FS_4_NUM_INPUT_ROWS - CNTR_NAME_SEL_3_NUM_OUTPUT_ROWS + CNTR_NAME_FS_4_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_3_TIME_TAKEN + CNTR_NAME_FS_4_TIME_TAKEN - CNTR_NAME_SEL_3_FATAL_ERROR + CNTR_NAME_FS_4_FATAL_ERROR - SEL_3 + FS_4 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - - - + - - - - - - - - - - key - - - s - - - - - - - - - - - - - 100 - - - - + + + + _col1 + + + value - - + + s - + + + + + + _col0 + + + key + + + s + + + - boolean + int @@ -865,24 +725,51 @@ + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + + - CNTR_NAME_FIL_2_NUM_INPUT_ROWS + CNTR_NAME_SEL_3_NUM_INPUT_ROWS - CNTR_NAME_FIL_2_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_3_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_2_TIME_TAKEN + CNTR_NAME_SEL_3_TIME_TAKEN - CNTR_NAME_FIL_2_FATAL_ERROR + CNTR_NAME_SEL_3_FATAL_ERROR - FIL_2 + SEL_3 @@ -896,9 +783,9 @@ - + - key + _col0 s @@ -909,9 +796,9 @@ - + - value + _col1 s @@ -1049,7 +936,11 @@ - + + + boolean + + @@ -1089,10 +980,30 @@ - + + + key + + + s + + + + + - + + + value + + + s + + + + + @@ -1109,165 +1020,32 @@ - - - - - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - - - - - - - - - - - - - - 2147483647 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - & - - - - - - - - - - - - - - - 4 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - 
% - - - - - - - - - - - - - - - 0 - - - - + + + key - - + + s - + - - - - - - - key - - - s - - - - - - - - - - - - - 100 - - - - - - - - + - + + + 100 + - + @@ -1412,7 +1190,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1424,7 +1202,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt srcbucket0.txt @@ -1485,11 +1263,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1297928591 + 1300391507 @@ -1551,11 +1329,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/srcbucket + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1297928591 + 1300391507 Index: ql/src/test/results/compiler/plan/subq.q.xml =================================================================== --- ql/src/test/results/compiler/plan/subq.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/subq.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -41,7 +41,7 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10001 @@ -50,7 +50,7 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10000 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10000 1 @@ -195,10 +195,10 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10001 - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10001 @@ -207,7 +207,7 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10001 -ext-10001 @@ -280,7 +280,7 @@ true - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10000 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10000 ../build/ql/test/data/warehouse/union.out @@ -304,10 +304,10 @@ true - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10001 - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10000 + 
file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10000 @@ -333,7 +333,7 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10001 @@ -419,11 +419,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928606 + 1300391523 @@ -481,11 +481,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928606 + 1300391523 @@ -509,198 +509,65 @@ - + - + - - - - - - - - - 1 - - - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10001 - - - 1 - - - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-43-29_224_3870164475862336610/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_4_NUM_INPUT_ROWS - - - CNTR_NAME_FS_4_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_4_TIME_TAKEN - - - CNTR_NAME_FS_4_FATAL_ERROR - - - - - FS_4 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - _col1 - - - src - - - - - + + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10001 - - _col0 - - - _col0 - - - src - - - - - + + 1 - - - - - - - - - - - - - + + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-52-06_605_3391647272849112971/-ext-10000/ - - - - _col0 - - - _col1 - - + + - - true + + 1 - CNTR_NAME_SEL_3_NUM_INPUT_ROWS + CNTR_NAME_FS_4_NUM_INPUT_ROWS - CNTR_NAME_SEL_3_NUM_OUTPUT_ROWS + CNTR_NAME_FS_4_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_3_TIME_TAKEN + CNTR_NAME_FS_4_TIME_TAKEN - CNTR_NAME_SEL_3_FATAL_ERROR + CNTR_NAME_FS_4_FATAL_ERROR - SEL_3 + FS_4 - + - - - - - - - key - - - _col0 - - - src - - - - - - - - - - value - - - _col1 - - - src - - - - - - - - - + @@ -710,9 +577,9 @@ _col1 - + - value + _col1 src @@ -724,9 +591,9 @@ _col0 - + - key + _col0 src @@ -743,10 +610,10 @@ - + - + @@ -768,26 +635,26 @@ - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_SEL_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_SEL_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_SEL_3_FATAL_ERROR - SEL_2 + SEL_3 - + @@ -797,6 +664,9 @@ + + key + _col0 @@ -810,6 +680,9 @@ + + value + _col1 @@ -829,73 +702,83 @@ - - - - - - - - - - key - - - src - - - - - - - - - - - - int - - - - - 100 - - - - + + + + _col1 + + + value - - + + src - - - boolean - - + + + _col0 + + + key + + + src + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -909,9 +792,9 @@ - + - key + _col0 src @@ -922,9 +805,9 @@ - + - value + _col1 src @@ -964,7 +847,11 @@ - + + + int + + 100 @@ -977,7 +864,11 @@ - + + + boolean + + @@ -1014,10 +905,30 @@ - + + + key + + + src + + + + + - + + + value + + + src + + + + + @@ -1116,7 +1027,7 @@ - 
pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src unioninput:src @@ -1128,7 +1039,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -1185,11 +1096,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928606 + 1300391523 @@ -1247,11 +1158,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928606 + 1300391523 Index: ql/src/test/results/compiler/plan/udf1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf1.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -62,11 +62,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928617 + 1300391535 @@ -124,11 +124,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928617 + 1300391535 @@ -152,1261 +152,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-02-16_23-43-40_456_6046106067552381652/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-02-16_23-43-40_456_6046106067552381652/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16 - - - serialization.format - 1 - - - columns.types - boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:string:string:string:string - - - - - - - 1 - - - - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - - - - FS_3 - - - - - - - - - - - - - - - - _col0 - - - - - - - - boolean - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - - - - _col4 - - - - - - - - - - - - - _col5 - - - - - - - - - - - - - _col6 - - - - - - - - - - - - - _col7 - - - - - - - - - - - - - _col8 - - - - - - - - - - - - - _col9 - - - - - - - - - - - - - _col10 - - - - - - - - - - - - - _col11 - - - - - - - - - - - - - _col12 - - - - - - - - - - - - - _col13 - - - - - - - - string - - - - - - - - - _col14 - - - - - - - - - - - - - _col15 - - - - - - - - - - - - - _col16 - - - - - - - - - - - - - - + + + + + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-52-18_917_6586548964302300130/-ext-10001 - - - - - - _col8 - - - - - - - - - - - - - - - - - - - - .* - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - + + 1 - - _col7 - - - - - - - - - - ab - - - - - - - - - - a - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - + + 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_12-52-18_917_6586548964302300130/-ext-10001/ - - _col6 - - - - - - - - - - ab - - - - - - - - - - _a% - - - - + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - + + org.apache.hadoop.mapred.TextInputFormat - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - _col5 - - - - - - - - - - ab - - + + + + columns + _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16 - - - - - - - \%\_ - - + + serialization.format + 1 - - - - - - true + + columns.types + boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:string:string:string:string - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - _col4 - - - - - - - - - - %_ - - - - - - - - - - \%\_ - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - + + 1 - - _col3 - - - - - - - - - - ab - - - - - - - - - - %a_ - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - - - - _col2 - - - - - - - - - - ab - - - - - - - - - - %a% - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - - - - _col1 - - - - - - - - - - b - - - - - - - - - - %a% - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - - - - _col9 - - - - - - - - - - a - - - - - - - - - - [ab] - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - - - - _col13 - - - - - - - - - - abc - - - - - - - - - - b - - - - - - - - - - c - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFRegExpReplace - - - regexp_replace - - - - - - - - - - _col12 - - - - - - - - - - hadoop - - - - - - - - - - o* - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - - - - _col11 - - - - - - - - - - hadoop - - - - - - - - - - [a-z]* - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - - - - _col10 - - - - - - - - - - - - - - - - - - - - [ab] - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - - - - _col16 - - - - - - - - - - hadoop - - - - - - - - - - (.)[a-z]* - - - - - - - - - - $1ive - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFRegExpReplace - - - regexp_replace - - - - - - - - - - _col15 - - - - - - - - - - abbbb - - - - - - - - - - bb - - - - - - - - - - b - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFRegExpReplace - - - regexp_replace - - - - - - - - - - _col14 - - - - - - - - - - abc - - - - - - - - - - z - - - - - - - - - - a - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFRegExpReplace - - - regexp_replace - - - - - - - - - - _col0 - - - - - - - - - - a - - - - - - - - - - %a% - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - _col4 - - - _col5 - - - _col6 - - - _col7 - - - _col8 - - - _col9 - - - _col10 - - - _col11 - - - _col12 - - - _col13 - - - _col14 - - - _col15 - - - _col16 - - - - - - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - 
CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + @@ -1416,25 +234,29 @@ - - _c0 - _col0 + + + - + + + boolean + + - - _c1 - _col1 + + + @@ -1442,12 +264,12 @@ - - _c2 - _col2 + + + @@ -1455,12 +277,12 @@ - - _c3 - _col3 + + + @@ -1468,12 +290,12 @@ - - _c4 - _col4 + + + @@ -1481,12 +303,12 @@ - - _c5 - _col5 + + + @@ -1494,12 +316,12 @@ - - _c6 - _col6 + + + @@ -1507,12 +329,12 @@ - - _c7 - _col7 + + + @@ -1520,12 +342,12 @@ - - _c8 - _col8 + + + @@ -1533,12 +355,12 @@ - - _c9 - _col9 + + + @@ -1546,12 +368,12 @@ - - _c10 - _col10 + + + @@ -1559,12 +381,12 @@ - - _c11 - _col11 + + + @@ -1572,12 +394,12 @@ - - _c12 - _col12 + + + @@ -1585,25 +407,29 @@ - - _c13 - _col13 + + + - + + + string + + - - _c14 - _col14 + + + @@ -1611,12 +437,12 @@ - - _c15 - _col15 + + + @@ -1624,12 +450,12 @@ - - _c16 - _col16 + + + @@ -1643,69 +469,935 @@ - - - - + + + + _col8 + - - - key + + + - - src + + + + + + + + .* + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col7 + + + - - - int - - + - 86 + ab + + + + + + + a + + + - + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + _col6 + + + + + + + + + + ab + + + + + + + + + + _a% + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col5 + + + + + + + + + + ab + + + + + + + + + + \%\_ + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col4 + + + + + + + + + + %_ + + + + + + + + + + \%\_ + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col3 + + + + + + + + + + ab + + + + + + + + + + %a_ + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col2 + + + + + + + + + + ab + + + + + + + + + + %a% + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col1 + + + + + + + + + + b + + + + + + + + + + %a% + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col9 + + + + + + + + + + a + + + + + + + + + + [ab] + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col13 + + + + + + + + + + abc + + + + + + + + + + b + + + + + + + + + + c + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRegExpReplace + + + regexp_replace + + + + + + + + + + _col12 + + + + + + + + + + hadoop + + + + + + + + + + o* + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col11 + + + + + + + + + + hadoop + + + + + + + + + + [a-z]* + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col10 + + + + + + + + + + + + + + + + + + + + [ab] + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col16 + + + + + + + + + + hadoop + + + + + + + + + + (.)[a-z]* + + + + + + + + + + $1ive + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRegExpReplace + + + regexp_replace + + + + + + + + + + _col15 + + + + + + + + + + abbbb + + + + + + + + + + bb + + + + + + + + + + b + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRegExpReplace + + + regexp_replace + + + + + + + + + + _col14 + + + + + + + + + + abc + + + + + + + + + + z + + + + + + + + + + a + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRegExpReplace + + + regexp_replace + + + + + + + + + + _col0 + + + + + + + + + + a + + + + + + + + + + %a% + + + + + + + + + true + + + 
org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + _col3 + + + _col4 + + + _col5 + + + _col6 + + + _col7 + + + _col8 + + + _col9 + + + _col10 + + + _col11 + + + _col12 + + + _col13 + + + _col14 + + + _col15 + + + _col16 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -1719,18 +1411,226 @@ - + + + _c0 + - key + _col0 - - src + + + + + + + + _c1 + + + _col1 + + + + + + + + + _c2 + + + _col2 + + + + + + + + + + _c3 + + + _col3 + + + + + + + + + + _c4 + + + _col4 + + + + + + + + + + _c5 + + + _col5 + + + + + + + + + + _c6 + + + _col6 + + + + + + + + + + _c7 + + + _col7 + + + + + + + + + + _c8 + + + _col8 + + + + + + + + + + _c9 + + + _col9 + + + + + + + + + + _c10 + + + _col10 + + + + + + + + + + _c11 + + + _col11 + + + + + + + + + + _c12 + + + _col12 + + + + + + + + + + _c13 + + + _col13 + + + + + + _c14 + + + _col14 + + + + + + + + + + _c15 + + + _col15 + + + + + + + + + + _c16 + + + _col16 + + + + + + @@ -1761,7 +1661,11 @@ - + + + int + + 86 @@ -1811,7 +1715,17 @@ - + + + key + + + src + + + + + @@ -1920,7 +1834,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -1932,7 +1846,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -1989,11 +1903,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928617 + 1300391535 @@ -2051,11 +1965,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928617 + 1300391535 Index: ql/src/test/results/compiler/plan/union.q.xml =================================================================== --- ql/src/test/results/compiler/plan/union.q.xml (revision 1083142) +++ ql/src/test/results/compiler/plan/union.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -41,7 +41,7 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10001 @@ -50,7 +50,7 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10000 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10000 1 @@ -195,10 +195,10 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10001 - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10001 + 
file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10001 @@ -207,7 +207,7 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10001 -ext-10001 @@ -280,7 +280,7 @@ true - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10000 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10000 ../build/ql/test/data/warehouse/union.out @@ -304,10 +304,10 @@ true - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10001 - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10000 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10000 @@ -333,7 +333,7 @@ - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10001 + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10001 @@ -419,11 +419,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928673 + 1300391597 @@ -481,11 +481,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928673 + 1300391597 @@ -551,11 +551,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928673 + 1300391597 @@ -613,11 +613,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928673 + 1300391597 @@ -641,620 +641,516 @@ - + - + - + - - - - - - - - - 1 - - - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10001 - - - 1 - - - file:/data/users/sdong/www/open-source-hive3/build/ql/scratchdir/hive_2011-02-16_23-44-36_753_3307275503036394113/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_8_NUM_INPUT_ROWS - - - CNTR_NAME_FS_8_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_8_TIME_TAKEN - - - CNTR_NAME_FS_8_FATAL_ERROR - - - - - FS_8 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - _col1 - - - src - - - - - + + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10001 - - _col0 - - - _col0 - - - src - - - - - + + 1 - - - - - - - - - - - - - + + file:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_12-53-20_166_6342884301608383323/-ext-10000/ - - - - _col0 - - - _col1 - - + + - - true + + 1 - CNTR_NAME_SEL_7_NUM_INPUT_ROWS + CNTR_NAME_FS_8_NUM_INPUT_ROWS - 
CNTR_NAME_SEL_7_NUM_OUTPUT_ROWS + CNTR_NAME_FS_8_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_7_TIME_TAKEN + CNTR_NAME_FS_8_TIME_TAKEN - CNTR_NAME_SEL_7_FATAL_ERROR + CNTR_NAME_FS_8_FATAL_ERROR - SEL_7 + FS_8 - + - - - - - - - key - - - _col0 - - - src - - - - - - - - - - value - - - _col1 - - - src - - - - - - - - - + + + + + _col1 + + + _col1 + + + src + + + + + + + + _col0 + + + _col0 + + + src + + + + + + + + - + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + - CNTR_NAME_UNION_6_NUM_INPUT_ROWS + CNTR_NAME_SEL_7_NUM_INPUT_ROWS - CNTR_NAME_UNION_6_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_7_NUM_OUTPUT_ROWS - CNTR_NAME_UNION_6_TIME_TAKEN + CNTR_NAME_SEL_7_TIME_TAKEN - CNTR_NAME_UNION_6_FATAL_ERROR + CNTR_NAME_SEL_7_FATAL_ERROR - UNION_6 + SEL_7 - + - - - - - - + + + + + + + + + + key + + _col0 + + + src + + + + - - - + + + + value + + _col1 - - - value - - - src - - - - - - - _col0 - - - key - - - src - - - - - + + src + + + + + + + + + + + + + + + + + + CNTR_NAME_UNION_6_NUM_INPUT_ROWS + + + CNTR_NAME_UNION_6_NUM_OUTPUT_ROWS + + + CNTR_NAME_UNION_6_TIME_TAKEN + + + CNTR_NAME_UNION_6_FATAL_ERROR + + + + + UNION_6 + + + + + + + + + + + + + + + + + + + _col1 + + + value + + + src + + + + + + + + _col0 + + + key + + + src + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + + + + + + CNTR_NAME_SEL_5_NUM_INPUT_ROWS + + + CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS + + + CNTR_NAME_SEL_5_TIME_TAKEN + + + CNTR_NAME_SEL_5_FATAL_ERROR + + + + + SEL_5 + + + + + + + + + + + + - - - - - + + + + + + + + + key + + + src + + + + + + + + + + + + int + + + + + 100 + + + + - - + + - - - - - - _col0 + + + + boolean + + - - _col1 - - - true - - CNTR_NAME_SEL_5_NUM_INPUT_ROWS + CNTR_NAME_FIL_10_NUM_INPUT_ROWS - CNTR_NAME_SEL_5_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_10_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_5_TIME_TAKEN + CNTR_NAME_FIL_10_TIME_TAKEN - CNTR_NAME_SEL_5_FATAL_ERROR + CNTR_NAME_FIL_10_FATAL_ERROR - SEL_5 + FIL_10 - + - + - - - - - - - - - key - - - src - - - - - - - - - - - - int - - - - - 100 - - - - - - - - - - - - boolean - - - - + + + src + + + - CNTR_NAME_FIL_4_NUM_INPUT_ROWS + CNTR_NAME_TS_3_NUM_INPUT_ROWS - CNTR_NAME_FIL_4_NUM_OUTPUT_ROWS + CNTR_NAME_TS_3_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_4_TIME_TAKEN + CNTR_NAME_TS_3_TIME_TAKEN - CNTR_NAME_FIL_4_FATAL_ERROR + CNTR_NAME_TS_3_FATAL_ERROR - - FIL_4 - - + - - - - - + 0 + + + 1 + + + + + TS_3 + + + + + + + + + key - - - - - - - - - - - - key - - - src - - - - - - - - - - - - - 100 - - - - - - - - - - - - + + src + + + - - - - CNTR_NAME_FIL_10_NUM_INPUT_ROWS + + + + value - - CNTR_NAME_FIL_10_NUM_OUTPUT_ROWS + + src - - CNTR_NAME_FIL_10_TIME_TAKEN + + - - CNTR_NAME_FIL_10_FATAL_ERROR - - - FIL_10 - - - - - - - - - - - + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + src + + + + + bigint - - - - src - - - - - - - - - - CNTR_NAME_TS_3_NUM_INPUT_ROWS - - - CNTR_NAME_TS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_TS_3_TIME_TAKEN - - - CNTR_NAME_TS_3_FATAL_ERROR - - - - - - - 0 - - - 1 - - - - - TS_3 - - - - - - - - - key - - - src - - - - - - - - - - value - - - src - - - - - - - - - - true - - - BLOCK__OFFSET__INSIDE__FILE - - - src - - - - - bigint - - - - - - - - - true - - - INPUT__FILE__NAME - - - src - - - - - - - - - - - - - - + + + + true + + INPUT__FILE__NAME + + + src + + + + - - - - - - - - - - - - - - @@ -1262,34 +1158,7 @@ - - - - - _col0 - - - src - - - - - - - - - - _col1 - - - src - - - - - - - + @@ -1302,7 +1171,7 @@ - + _col0 @@ -1315,7 +1184,7 @@ - + _col1 @@ -1335,143 +1204,96 @@ - - - - _col1 - - - value - - - src - - - - - - - - _col0 
- - - key - - - src - - - - - - - - - - - + + + - + + + _col0 + + + src + + + + + - + + + _col1 + + + src + + + + + - - - - _col0 - - - _col1 - - - - - - - CNTR_NAME_SEL_2_NUM_INPUT_ROWS - - - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - - - CNTR_NAME_SEL_2_TIME_TAKEN - - - CNTR_NAME_SEL_2_FATAL_ERROR - - + + + + + + + + _col1 + + + value - - SEL_2 + + src - - - - - - + + - - - - - - - - - - - - - + + + + _col0 + + + key + + src + + + + - - - - - - - - - key - - - src - - - - - - - - - - - - - 100 - - - - + + + + + - - + + - - + + + + + + _col0 + + _col1 + @@ -1479,21 +1301,21 @@ - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -1507,30 +1329,10 @@ - - - key - - - src - - - - - + - - - value - - - src - - - - - + @@ -1612,10 +1414,30 @@ - + + + key + + + src + + + + + - + + + value + + + src + + + + + @@ -1714,7 +1536,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src null-subquery1:unioninput-subquery1:src @@ -1729,7 +1551,7 @@ - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src src @@ -1786,11 +1608,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928673 + 1300391597 @@ -1848,11 +1670,11 @@ location - pfile:/data/users/sdong/www/open-source-hive3/build/ql/test/data/warehouse/src + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1297928673 + 1300391597