Index: common/src/java/org/apache/hadoop/hive/conf/HiveConf.java =================================================================== --- common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (revision 1145463) +++ common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (working copy) @@ -368,6 +368,7 @@ HIVEOPTCP("hive.optimize.cp", true), // column pruner HIVEOPTINDEXFILTER("hive.optimize.index.filter", false), // automatically use indexes HIVEOPTPPD("hive.optimize.ppd", true), // predicate pushdown + HIVEPPDREMOVEDUPLICATEFILTERS("hive.ppd.remove.duplicatefilters", true), // push predicates down to storage handlers HIVEOPTPPD_STORAGE("hive.optimize.ppd.storage", true), HIVEOPTGROUPBY("hive.optimize.groupby", true), // optimize group by Index: contrib/src/test/results/clientpositive/dboutput.q.out =================================================================== --- contrib/src/test/results/clientpositive/dboutput.q.out (revision 1145463) +++ contrib/src/test/results/clientpositive/dboutput.q.out (working copy) @@ -143,21 +143,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: dboutput('jdbc:derby:../build/test_dboutput_db','','','INSERT INTO app_info (kkey,vvalue) VALUES (?,?)',key,value) - type: int - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: dboutput('jdbc:derby:../build/test_dboutput_db','','','INSERT INTO app_info (kkey,vvalue) VALUES (?,?)',key,value) + type: int + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: contrib/src/test/results/clientpositive/serde_typedbytes4.q.out =================================================================== --- contrib/src/test/results/clientpositive/serde_typedbytes4.q.out (revision 1145463) +++ contrib/src/test/results/clientpositive/serde_typedbytes4.q.out (working copy) @@ -50,42 +50,38 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: UDFToByte(key) - type: tinyint - expr: value - type: string - outputColumnNames: _col0, _col1 - Transform Operator - command: /bin/cat - output info: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - Select Operator - expressions: + Select Operator + expressions: + expr: UDFToByte(key) + type: tinyint + expr: value + type: string + outputColumnNames: _col0, _col1 + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: 
_col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator Index: hbase-handler/src/test/results/hbase_pushdown.q.out =================================================================== --- hbase-handler/src/test/results/hbase_pushdown.q.out (revision 1145463) +++ hbase-handler/src/test/results/hbase_pushdown.q.out (working copy) @@ -103,23 +103,19 @@ predicate: expr: (value like '%90%') type: boolean - Filter Operator - predicate: - expr: ((key = 90) and (value like '%90%')) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -166,23 +162,19 @@ predicate: expr: ((value like '%90%') and (key = UDFToInteger(value))) type: boolean - Filter Operator - predicate: - expr: (((key = 90) and (value like '%90%')) and (key = UDFToInteger(value))) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -217,23 +209,19 @@ predicate: expr: (((key = 80) and (key = 90)) and (value like '%90%')) type: boolean - Filter Operator - predicate: - expr: (((key = 80) and (key = 90)) and (value like '%90%')) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -300,23 +288,19 @@ predicate: expr: (CASE WHEN ((key = 90)) THEN (2) ELSE (4) END > 3) type: boolean - Filter Operator - predicate: - expr: (CASE WHEN ((key = 90)) THEN (2) ELSE (4) END > 3) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -353,23 +337,19 @@ predicate: expr: ((key = 80) or (value like '%90%')) type: boolean - Filter Operator - predicate: - expr: ((key = 80) or (value like '%90%')) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -402,23 +382,19 @@ predicate: expr: (key = 90) type: boolean - Filter Operator - predicate: - expr: (key = 90) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: hbase-handler/src/test/results/hbase_queries.q.out =================================================================== --- hbase-handler/src/test/results/hbase_queries.q.out (revision 1145463) +++ hbase-handler/src/test/results/hbase_queries.q.out (working copy) @@ -50,32 +50,28 @@ predicate: expr: ((key % 2) = 0) type: boolean - Filter Operator - predicate: - expr: ((key % 2) = 0) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat - output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat - serde: org.apache.hadoop.hive.hbase.HBaseSerDe - name: default.hbase_table_1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.hive.hbase.HiveHBaseTableInputFormat + output format: org.apache.hadoop.hive.hbase.HiveHBaseTableOutputFormat + serde: org.apache.hadoop.hive.hbase.HBaseSerDe + name: 
default.hbase_table_1 PREHOOK: query: FROM src INSERT OVERWRITE TABLE hbase_table_1 SELECT * WHERE (key%2)=0 @@ -306,24 +302,20 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: int - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 0 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 0 y:hbase_table_2 TableScan alias: hbase_table_2 @@ -331,31 +323,27 @@ predicate: expr: (key < 120) type: boolean - Filter Operator - predicate: - expr: (key < 120) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (revision 1145463) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Operator.java (working copy) @@ -675,6 +675,24 @@ } } + public void removeParent(Operator parent) { + int parentIndex = parentOperators.indexOf(parent); + assert parentIndex != -1; + if (parentOperators.size() == 1) { + parentOperators = null; + } else { + parentOperators.remove(parentIndex); + } + + int childIndex = parent.getChildOperators().indexOf(this); + assert childIndex != -1; + if (parent.getChildOperators().size() == 1) { + parent.setChildOperators(null); + } else { + parent.getChildOperators().remove(childIndex); + } + } + /** * Replace one parent with another at the same position. Chilren of the new * parent are not updated Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (revision 1145463) +++ ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (working copy) @@ -66,21 +66,30 @@ private RowResolver toRR = null; /** - * this map contains a expr infos. Each key is a node in the expression tree - * and the information for each node is the value which is used while walking - * the tree by its parent. + * Values the expression sub-trees (predicates) that can be pushed down for + * root expression tree. Since there can be more than one alias in an + * expression tree, this is a map from the alias to predicates. */ private final Map> pushdownPreds; + /** - * Values the expression sub-trees (predicates) that can be pushed down for + * Values the expression sub-trees (predicates) that can not be pushed down for * root expression tree. 
Since there can be more than one alias in an
    * expression tree, this is a map from the alias to predicates.
    */
+  private final Map<String, List<ExprNodeDesc>> nonFinalPreds;
+
+  /**
+   * this map contains the expr infos. Each key is a node in the expression tree
+   * and the information for each node is the value which is used while walking
+   * the tree by its parent.
+   */
   private final Map<ExprNodeDesc, ExprInfo> exprInfoMap;
   private boolean isDeterministic = true;
 
   public ExprWalkerInfo() {
     pushdownPreds = new HashMap<String, List<ExprNodeDesc>>();
+    nonFinalPreds = new HashMap<String, List<ExprNodeDesc>>();
     exprInfoMap = new HashMap<ExprNodeDesc, ExprInfo>();
   }
 
@@ -91,6 +100,7 @@
 
     pushdownPreds = new HashMap<String, List<ExprNodeDesc>>();
     exprInfoMap = new HashMap<ExprNodeDesc, ExprInfo>();
+    nonFinalPreds = new HashMap<String, List<ExprNodeDesc>>();
   }
 
   /**
@@ -214,6 +224,19 @@
   }
 
   /**
+   * Adds the passed list of pushDowns for the alias.
+   *
+   * @param alias
+   * @param pushDowns
+   */
+  public void addPushDowns(String alias, List<ExprNodeDesc> pushDowns) {
+    if (pushdownPreds.get(alias) == null) {
+      pushdownPreds.put(alias, new ArrayList<ExprNodeDesc>());
+    }
+    pushdownPreds.get(alias).addAll(pushDowns);
+  }
+
+  /**
    * Returns the list of pushdown expressions for each alias that appear in the
    * current operator's RowResolver. The exprs in each list can be combined
    * using conjunction (AND).
@@ -225,6 +248,28 @@
   }
 
   /**
+   * Adds the specified expr as a non-final candidate.
+   *
+   * @param expr
+   */
+  public void addNonFinalCandidate(ExprNodeDesc expr) {
+    String alias = getAlias(expr);
+    if (nonFinalPreds.get(alias) == null) {
+      nonFinalPreds.put(alias, new ArrayList<ExprNodeDesc>());
+    }
+    nonFinalPreds.get(alias).add(expr.clone());
+  }
+
+  /**
+   * Returns the list of non-final candidate predicates for each alias.
+   *
+   * @return
+   */
+  public Map<String, List<ExprNodeDesc>> getNonFinalCandidates() {
+    return nonFinalPreds;
+  }
+
+  /**
    * Merges the specified pushdown predicates with the current class.
    *
    * @param ewi
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (revision 1145463)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerProcFactory.java (working copy)
@@ -26,6 +26,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
@@ -277,9 +278,10 @@
 
     egw.startWalking(startNodes, null);
 
+    HiveConf conf = opContext.getParseContext().getConf();
     // check the root expression for final candidates
     for (ExprNodeDesc pred : clonedPreds) {
-      extractFinalCandidates(pred, exprContext);
+      extractFinalCandidates(pred, exprContext, conf);
     }
     return exprContext;
   }
@@ -289,17 +291,20 @@
    * candidates.
    */
   private static void extractFinalCandidates(ExprNodeDesc expr,
-      ExprWalkerInfo ctx) {
+      ExprWalkerInfo ctx, HiveConf conf) {
     if (ctx.isCandidate(expr)) {
       ctx.addFinalCandidate(expr);
       return;
+    } else if (!FunctionRegistry.isOpAnd(expr) &&
+        HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+      ctx.addNonFinalCandidate(expr);
     }
 
     if (FunctionRegistry.isOpAnd(expr)) {
       // If the operator is AND, we need to determine if any of the children are
       // final candidates.
       for (Node ch : expr.getChildren()) {
-        extractFinalCandidates((ExprNodeDesc) ch, ctx);
+        extractFinalCandidates((ExprNodeDesc) ch, ctx, conf);
       }
     }
   }
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (revision 1145463)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (working copy)
@@ -88,11 +88,31 @@
       // script operator is a black-box to hive so no optimization here
       // assuming that nothing can be pushed above the script op
       // same with LIMIT op
+      // create a filter with all children predicates
+      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
+      if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+          HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+        ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, null, false);
+        return createFilter((Operator)nd, unpushedPreds, owi);
+      }
       return null;
     }
 
   }
 
+  public static class UDTFPPD extends DefaultPPD implements NodeProcessor {
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+      LOG.info("Processing for " + nd.getName() + "("
+          + ((Operator) nd).getIdentifier() + ")");
+      // Predicates for UDTF won't be candidates for its children. So, nothing to
+      // optimize here. See lateral_view_ppd.q for example.
+      return null;
+    }
+
+  }
+
   public static class LateralViewForwardPPD extends DefaultPPD implements
       NodeProcessor {
     @Override
@@ -102,6 +122,10 @@
           + ((Operator) nd).getIdentifier() + ")");
       OpWalkerInfo owi = (OpWalkerInfo) procCtx;
 
+      // The lateral view forward operator has 2 children, a SELECT(*) and
+      // a SELECT(cols) (for the UDTF operator). The child at index 0 is the
+      // SELECT(*) because that's the way that the DAG was constructed. We
+      // only want to get the predicates from the SELECT(*).
       ExprWalkerInfo childPreds = owi
           .getPrunedPreds((Operator) nd.getChildren()
           .get(0));
@@ -146,22 +170,32 @@
       OpWalkerInfo owi = (OpWalkerInfo) procCtx;
       Operator op = (Operator) nd;
       ExprNodeDesc predicate = (((FilterOperator) nd).getConf()).getPredicate();
-      // get pushdown predicates for this operator's predicate
-      ExprWalkerInfo ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op,
-          predicate);
-      if (!ewi.isDeterministic()) {
-        /* predicate is not deterministic */
-        if (op.getChildren() != null && op.getChildren().size() == 1) {
-          createFilter(op, owi
-              .getPrunedPreds((Operator) (op
-              .getChildren().get(0))), owi);
+      ExprWalkerInfo ewi = new ExprWalkerInfo();
+      // Don't push a sampling predicate since createFilter() always creates a filter
+      // with isSamplePred = false. Also, the filterop with sampling pred is always
+      // a child of TableScan, so there is no need to push this predicate.
+      if (!((FilterOperator)op).getConf().getIsSamplingPred()) {
+        // get pushdown predicates for this operator's predicate
+        ewi = ExprWalkerProcFactory.extractPushdownPreds(owi, op, predicate);
+        if (!ewi.isDeterministic()) {
+          /* predicate is not deterministic */
+          if (op.getChildren() != null && op.getChildren().size() == 1) {
+            createFilter(op, owi
+                .getPrunedPreds((Operator) (op
+                .getChildren().get(0))), owi);
+          }
+          return null;
         }
-
-        return null;
+        if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+            HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+          // add this filter for deletion, if it does not have non-final candidates
+          if (ewi.getNonFinalCandidates().values().isEmpty()) {
+            owi.addCandidateFilterOp((FilterOperator)op);
+          }
+        }
+        logExpr(nd, ewi);
+        owi.putPrunedPreds((Operator) nd, ewi);
       }
-
-      logExpr(nd, ewi);
-      owi.putPrunedPreds(op, ewi);
       // merge it with children predicates
       mergeWithChildrenPred(op, owi, ewi, null, false);
 
@@ -182,7 +216,15 @@
       OpWalkerInfo owi = (OpWalkerInfo) procCtx;
       Set<String> aliases = getQualifiedAliases((JoinOperator) nd, owi
           .getRowResolver(nd));
-      mergeWithChildrenPred(nd, owi, null, aliases, false);
+      boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, aliases, false);
+      if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+          HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+        if (hasUnpushedPredicates) {
+          aliases = null;
+        }
+        ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, aliases, false);
+        return createFilter((Operator)nd, unpushedPreds, owi);
+      }
       return null;
     }
 
@@ -283,7 +325,15 @@
         Object... nodeOutputs) throws SemanticException {
       LOG.info("Processing for " + nd.getName() + "("
           + ((Operator) nd).getIdentifier() + ")");
-      mergeWithChildrenPred(nd, (OpWalkerInfo) procCtx, null, null, false);
+      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
+      boolean hasUnpushedPredicates = mergeWithChildrenPred(nd, owi, null, null, false);
+      if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+          HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+        if (hasUnpushedPredicates) {
+          ExprWalkerInfo unpushedPreds = mergeChildrenPred(nd, owi, null, false);
+          return createFilter((Operator)nd, unpushedPreds, owi);
+        }
+      }
       return null;
     }
 
@@ -318,20 +368,21 @@
      * @param ignoreAliases
      * @throws SemanticException
      */
-    protected void mergeWithChildrenPred(Node nd, OpWalkerInfo owi,
+    protected boolean mergeWithChildrenPred(Node nd, OpWalkerInfo owi,
         ExprWalkerInfo ewi, Set<String> aliases, boolean ignoreAliases)
         throws SemanticException {
+      boolean hasUnpushedPredicates = false;
       if (nd.getChildren() == null || nd.getChildren().size() > 1) {
         // ppd for multi-insert query is not yet implemented
         // no-op for leafs
-        return;
+        return hasUnpushedPredicates;
       }
       Operator op = (Operator) nd;
       ExprWalkerInfo childPreds = owi
           .getPrunedPreds((Operator) nd.getChildren()
           .get(0));
       if (childPreds == null) {
-        return;
+        return hasUnpushedPredicates;
       }
       if (ewi == null) {
         ewi = new ExprWalkerInfo();
@@ -344,12 +395,41 @@
           // input8.q
           ExprWalkerInfo extractPushdownPreds = ExprWalkerProcFactory
               .extractPushdownPreds(owi, op, e.getValue());
+          if (!extractPushdownPreds.getNonFinalCandidates().isEmpty()) {
+            hasUnpushedPredicates = true;
+          }
           ewi.merge(extractPushdownPreds);
           logExpr(nd, extractPushdownPreds);
         }
       }
       owi.putPrunedPreds((Operator) nd, ewi);
+      return hasUnpushedPredicates;
     }
+
+    protected ExprWalkerInfo mergeChildrenPred(Node nd, OpWalkerInfo owi,
+        Set<String> excludedAliases, boolean ignoreAliases)
+        throws SemanticException {
+      if (nd.getChildren() == null) {
+        return null;
+      }
+      Operator op = (Operator) nd;
+      ExprWalkerInfo ewi = new ExprWalkerInfo();
+      for (Operator child : op.getChildOperators()) {
+        ExprWalkerInfo childPreds = owi.getPrunedPreds(child);
+        if (childPreds == null) {
+          continue;
+        }
+        for (Entry<String, List<ExprNodeDesc>> e : childPreds
+            .getFinalCandidates().entrySet()) {
+          if (ignoreAliases || excludedAliases == null ||
+              !excludedAliases.contains(e.getKey()) || e.getKey() == null) {
+            ewi.addPushDowns(e.getKey(), e.getValue());
+            logExpr(nd, ewi);
+          }
+        }
+      }
+      return ewi;
+    }
   }
 
   protected static Object createFilter(Operator op,
@@ -386,7 +466,7 @@
     if (condn == null) {
       return null;
     }
-
+
     if (op instanceof TableScanOperator) {
       boolean pushFilterToStorage;
       HiveConf hiveConf = owi.getParseContext().getConf();
@@ -423,6 +503,24 @@
     }
     OpParseContext ctx = new OpParseContext(inputRR);
     owi.put(output, ctx);
+
+    if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+        HiveConf.ConfVars.HIVEPPDREMOVEDUPLICATEFILTERS)) {
+      // remove the candidate filter ops
+      for (FilterOperator fop : owi.getCandidateFilterOps()) {
+        List<Operator<? extends Serializable>> children = fop.getChildOperators();
+        List<Operator<? extends Serializable>> parents = fop.getParentOperators();
+        for (Operator parent : parents) {
+          parent.getChildOperators().addAll(children);
+          parent.removeChild(fop);
+        }
+        for (Operator child : children) {
+          child.getParentOperators().addAll(parents);
+          child.removeParent(fop);
+        }
+      }
+      owi.getCandidateFilterOps().clear();
+    }
     return output;
   }
 
@@ -506,7 +604,7 @@
     tableScanDesc.setFilterExpr(decomposed.pushedPredicate);
     return decomposed.residualPredicate;
   }
-
+
   public static NodeProcessor getFilterProc() {
     return new FilterPPD();
   }
@@ -536,7 +634,7 @@
   }
 
   public static NodeProcessor getUDTFProc() {
-    return new ScriptPPD();
+    return new UDTFPPD();
   }
 
   public static NodeProcessor getLVFProc() {
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (revision 1145463)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/OpWalkerInfo.java (working copy)
@@ -18,9 +18,12 @@
 package org.apache.hadoop.hive.ql.ppd;
 
 import java.io.Serializable;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
@@ -39,11 +42,13 @@
   private final HashMap<Operator<? extends Serializable>, ExprWalkerInfo> opToPushdownPredMap;
   private final Map<Operator<? extends Serializable>, OpParseContext> opToParseCtxMap;
   private final ParseContext pGraphContext;
+  private final List<FilterOperator> candidateFilterOps;
 
   public OpWalkerInfo(ParseContext pGraphContext) {
     this.pGraphContext = pGraphContext;
     opToParseCtxMap = pGraphContext.getOpParseCtx();
     opToPushdownPredMap = new HashMap<Operator<? extends Serializable>, ExprWalkerInfo>();
+    candidateFilterOps = new ArrayList<FilterOperator>();
   }
 
   public ExprWalkerInfo getPrunedPreds(Operator op) {
@@ -67,4 +72,13 @@
   public ParseContext getParseContext() {
     return pGraphContext;
   }
+
+  public List<FilterOperator> getCandidateFilterOps() {
+    return candidateFilterOps;
+  }
+
+  public void addCandidateFilterOp(FilterOperator fop) {
+    candidateFilterOps.add(fop);
+  }
+
 }
Index: ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
===================================================================
--- ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (revision 1145463)
+++ ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (working copy)
@@ -17,13 +17,10
@@ */ package org.apache.hadoop.hive.ql.ppd; -import java.io.Serializable; import java.util.ArrayList; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; -import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker; import org.apache.hadoop.hive.ql.lib.DefaultRuleDispatcher; import org.apache.hadoop.hive.ql.lib.Dispatcher; @@ -33,7 +30,6 @@ import org.apache.hadoop.hive.ql.lib.Rule; import org.apache.hadoop.hive.ql.lib.RuleRegExp; import org.apache.hadoop.hive.ql.optimizer.Transform; -import org.apache.hadoop.hive.ql.parse.OpParseContext; import org.apache.hadoop.hive.ql.parse.ParseContext; import org.apache.hadoop.hive.ql.parse.SemanticException; @@ -73,12 +69,10 @@ public class PredicatePushDown implements Transform { private ParseContext pGraphContext; - private HashMap, OpParseContext> opToParseCtxMap; @Override public ParseContext transform(ParseContext pctx) throws SemanticException { pGraphContext = pctx; - opToParseCtxMap = pGraphContext.getOpParseCtx(); // create a the context for walking operators OpWalkerInfo opWalkerInfo = new OpWalkerInfo(pGraphContext); Index: ql/src/test/queries/clientpositive/ppd1.q =================================================================== --- ql/src/test/queries/clientpositive/ppd1.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd1.q (working copy) @@ -1,7 +1,14 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT src.key as c3 from src where src.key > '2'; SELECT src.key as c3 from src where src.key > '2'; +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT src.key as c3 from src where src.key > '2'; + +SELECT src.key as c3 from src where src.key > '2'; Index: ql/src/test/queries/clientpositive/ppd_clusterby.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_clusterby.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_clusterby.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key; @@ -7,3 +8,13 @@ EXPLAIN SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1;; SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1; + +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key; +SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key; + +EXPLAIN +SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1;; +SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1; Index: ql/src/test/queries/clientpositive/ppd_constant_expr.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_constant_expr.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_constant_expr.q (working copy) @@ -1,4 +1,6 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; + CREATE TABLE ppd_constant_expr(c1 STRING, c2 INT, c3 DOUBLE) STORED AS TEXTFILE; EXPLAIN @@ -10,4 +12,13 @@ SELECT ppd_constant_expr.* FROM ppd_constant_expr; +set hive.ppd.remove.duplicatefilters=true; +EXPLAIN +FROM src1 +INSERT OVERWRITE TABLE ppd_constant_expr SELECT 4 + NULL, src1.key - NULL, NULL + NULL; + +FROM src1 +INSERT OVERWRITE TABLE 
ppd_constant_expr SELECT 4 + NULL, src1.key - NULL, NULL + NULL; + +SELECT ppd_constant_expr.* FROM ppd_constant_expr; Index: ql/src/test/queries/clientpositive/ppd_gby.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_gby.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_gby.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT src1.c1 @@ -10,3 +11,16 @@ FROM (SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400'); + +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT src1.c1 +FROM +(SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 +WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400'); + +SELECT src1.c1 +FROM +(SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 +WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400'); Index: ql/src/test/queries/clientpositive/ppd_gby2.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_gby2.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_gby2.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT max(src1.c1), src1.c2 @@ -12,3 +13,18 @@ (SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1 WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400') GROUP BY src1.c2; + +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT max(src1.c1), src1.c2 +FROM +(SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1 +WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400') +GROUP BY src1.c2; + +SELECT max(src1.c1), src1.c2 +FROM +(SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1 +WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400') +GROUP BY src1.c2; Index: ql/src/test/queries/clientpositive/ppd_gby_join.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_gby_join.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_gby_join.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT src1.c1, count(1) @@ -10,3 +11,14 @@ WHERE src1.c1 > '20' AND (src1.c2 < 'val_50' OR src1.c1 > '2') AND (src2.c3 > '50' OR src1.c1 < '50') AND (src2.c3 <> '4') GROUP BY src1.c1; +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT src1.c1, count(1) +FROM +(SELECT src.key AS c1, src.value AS c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key AS c3, src.value AS c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' AND (src1.c2 < 'val_50' OR src1.c1 > '2') AND (src2.c3 > '50' OR src1.c1 < '50') AND (src2.c3 <> '4') +GROUP BY src1.c1; Index: ql/src/test/queries/clientpositive/ppd_join.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_join.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_join.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set 
hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT src1.c1, src2.c4 @@ -16,3 +17,22 @@ (SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 ON src1.c1 = src2.c3 AND src1.c1 < '400' WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4'); + +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4'); + +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4'); Index: ql/src/test/queries/clientpositive/ppd_join2.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_join2.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_join2.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT src1.c1, src2.c4 @@ -22,3 +23,28 @@ (SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3 ON src1.c2 = src3.c6 WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13); + +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '305' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3 +ON src1.c2 = src3.c6 +WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13); + +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '305' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3 +ON src1.c2 = src3.c6 +WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13); Index: ql/src/test/queries/clientpositive/ppd_join3.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_join3.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_join3.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT src1.c1, src2.c4 @@ -22,3 +23,28 @@ (SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3 ON src1.c1 = src3.c5 WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1'); + +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT src1.c1, src2.c4 +FROM 
+(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '12' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3 +ON src1.c1 = src3.c5 +WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1'); + +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '12' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3 +ON src1.c1 = src3.c5 +WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1'); Index: ql/src/test/queries/clientpositive/ppd_multi_insert.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_multi_insert.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_multi_insert.q (working copy) @@ -1,7 +1,6 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; - - CREATE TABLE mi1(key INT, value STRING) STORED AS TEXTFILE; CREATE TABLE mi2(key INT, value STRING) STORED AS TEXTFILE; CREATE TABLE mi3(key INT) PARTITIONED BY(ds STRING, hr STRING) STORED AS TEXTFILE; @@ -25,5 +24,22 @@ dfs -cat ../build/ql/test/data/warehouse/mi4.out/*; +set hive.ppd.remove.duplicatefilters=true; +EXPLAIN +FROM src a JOIN src b ON (a.key = b.key) +INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100 +INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200 +INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300 +INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300; +FROM src a JOIN src b ON (a.key = b.key) +INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100 +INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200 +INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300 +INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300; + +SELECT mi1.* FROM mi1; +SELECT mi2.* FROM mi2; +SELECT mi3.* FROM mi3; +dfs -cat ../build/ql/test/data/warehouse/mi4.out/*; Index: ql/src/test/queries/clientpositive/ppd_outer_join1.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_outer_join1.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_outer_join1.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN FROM @@ -17,3 +18,21 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN + FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; + + FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25; Index: ql/src/test/queries/clientpositive/ppd_outer_join2.q 
=================================================================== --- ql/src/test/queries/clientpositive/ppd_outer_join2.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_outer_join2.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN FROM @@ -17,3 +18,21 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'; +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN + FROM + src a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'; + + FROM + src a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'; Index: ql/src/test/queries/clientpositive/ppd_outer_join3.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_outer_join3.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_outer_join3.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN FROM @@ -17,5 +18,21 @@ SELECT a.key, a.value, b.key, b.value WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'; +set hive.ppd.remove.duplicatefilters=true; +EXPLAIN + FROM + src a + FULL OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'; + FROM + src a + FULL OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25'; Index: ql/src/test/queries/clientpositive/ppd_outer_join4.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_outer_join4.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_outer_join4.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN FROM @@ -23,3 +24,27 @@ SELECT a.key, a.value, b.key, b.value, c.key WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 ; +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN + FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + RIGHT OUTER JOIN + src c + ON (a.key = c.key) + SELECT a.key, a.value, b.key, b.value, c.key + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 ; + + FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + RIGHT OUTER JOIN + src c + ON (a.key = c.key) + SELECT a.key, a.value, b.key, b.value, c.key + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 ; Index: ql/src/test/queries/clientpositive/ppd_random.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_random.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_random.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT src1.c1, src2.c4 @@ -8,3 +9,14 @@ (SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 ON src1.c1 = src2.c3 WHERE rand() > 0.5; + +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src ) src1 +JOIN +(SELECT src.key as c3, 
src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 +WHERE rand() > 0.5; Index: ql/src/test/queries/clientpositive/ppd_transform.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_transform.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_transform.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN FROM ( @@ -17,3 +18,21 @@ ) tmap SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; + +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100; Index: ql/src/test/queries/clientpositive/ppd_udf_case.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_udf_case.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_udf_case.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN SELECT * @@ -25,3 +26,30 @@ END ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr; +set hive.ppd.remove.duplicatefilters=true; + +EXPLAIN +SELECT * +FROM srcpart a JOIN srcpart b +ON a.key = b.key +WHERE a.ds = '2008-04-08' AND + b.ds = '2008-04-08' AND + CASE a.key + WHEN '27' THEN TRUE + WHEN '38' THEN FALSE + ELSE NULL + END +ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr; + +SELECT * +FROM srcpart a JOIN srcpart b +ON a.key = b.key +WHERE a.ds = '2008-04-08' AND + b.ds = '2008-04-08' AND + CASE a.key + WHEN '27' THEN TRUE + WHEN '38' THEN FALSE + ELSE NULL + END +ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr; + Index: ql/src/test/queries/clientpositive/ppd_union.q =================================================================== --- ql/src/test/queries/clientpositive/ppd_union.q (revision 1145463) +++ ql/src/test/queries/clientpositive/ppd_union.q (working copy) @@ -1,4 +1,5 @@ set hive.optimize.ppd=true; +set hive.ppd.remove.duplicatefilters=false; EXPLAIN FROM ( @@ -17,4 +18,21 @@ SELECT unioned_query.* WHERE key > '4' and value > 'val_4'; +set hive.ppd.remove.duplicatefilters=true; +EXPLAIN +FROM ( + FROM src select src.key, src.value WHERE src.key < '100' + UNION ALL + FROM src SELECT src.* WHERE src.key > '150' +) unioned_query +SELECT unioned_query.* + WHERE key > '4' and value > 'val_4'; + +FROM ( + FROM src select src.key, src.value WHERE src.key < '100' + UNION ALL + FROM src SELECT src.* WHERE src.key > '150' +) unioned_query +SELECT unioned_query.* + WHERE key > '4' and value > 'val_4'; Index: ql/src/test/results/clientpositive/auto_join0.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join0.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join0.q.out (working copy) @@ -52,26 +52,22 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [] - 1 
[] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [] + 1 [] + Position of Big Table: 0 Stage: Stage-5 Map Reduce @@ -83,46 +79,42 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [] - 1 [] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [] + 1 [] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -220,26 +212,22 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [] - 1 [] - Position of Big Table: 1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [] + 1 [] + Position of Big Table: 1 Stage: Stage-6 Map Reduce @@ -251,46 +239,42 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [] - 1 [] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [] + 1 [] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 1 + Select Operator + expressions: + 
expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -304,25 +288,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: 0 + value expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string a:src2:src TableScan alias: src @@ -330,25 +310,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: 1 + value expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join11.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join11.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join11.q.out (working copy) @@ -73,41 +73,37 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} - 1 {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} + 1 {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col3 - Position of Big Table: 0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -159,19 +155,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 1 Stage: Stage-5 Map Reduce @@ -235,22 +227,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src Index: ql/src/test/results/clientpositive/auto_join12.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join12.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join12.q.out (working copy) @@ -82,21 +82,17 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-5 Map Reduce @@ -113,44 +109,40 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 0 to 2 - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + outputColumnNames: _col0, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col3 - Position of Big Table: 0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - 
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -205,21 +197,17 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + Position of Big Table: 1 src3:src TableScan alias: src @@ -232,21 +220,17 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + Position of Big Table: 1 Stage: Stage-6 Map Reduce @@ -320,21 +304,17 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + Position of Big Table: 2 src2:src TableScan alias: src @@ -372,44 +352,40 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 0 to 2 - condition expressions: - 0 {_col0} - 1 {_col1} - 2 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - 2 [Column[_col0]] + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {_col0} + 1 {_col1} + 2 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + 2 [Column[_col0]] + outputColumnNames: _col0, _col3 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col3 - Position of Big Table: 2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + 
compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -428,22 +404,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src @@ -478,19 +450,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join13.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join13.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join13.q.out (working copy) @@ -85,28 +85,24 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - outputColumnNames: _col0, _col2, _col3 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col2, _col3 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -132,19 +128,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col3} {_col0} - 1 - handleSkewJoin: false - keys: - 0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()] - 1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col3} {_col0} + 1 + handleSkewJoin: false + keys: + 0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()] + 1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()] + Position of Big Table: 0 Stage: Stage-6 Map Reduce @@ -246,41 +238,37 @@ expr: key type: string outputColumnNames: 
_col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col3} {_col0} - 1 - handleSkewJoin: false - keys: - 0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()] - 1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()] - outputColumnNames: _col1, _col2 - Position of Big Table: 1 - Select Operator - expressions: - expr: _col2 - type: string - expr: _col1 - type: string - outputColumnNames: _col2, _col1 - Group By Operator - aggregations: - expr: sum(hash(_col2,_col1)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col3} {_col0} + 1 + handleSkewJoin: false + keys: + 0 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0], Column[_col2]()] + 1 [class org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge(Column[_col0]()] + outputColumnNames: _col1, _col2 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col2 + type: string + expr: _col1 + type: string + outputColumnNames: _col2, _col1 + Group By Operator + aggregations: + expr: sum(hash(_col2,_col1)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -314,19 +302,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - Reduce Output Operator - key expressions: - expr: UDFToDouble(_col0) - type: double - sort order: + - Map-reduce partition columns: - expr: UDFToDouble(_col0) - type: double - tag: 1 + Reduce Output Operator + key expressions: + expr: UDFToDouble(_col0) + type: double + sort order: + + Map-reduce partition columns: + expr: UDFToDouble(_col0) + type: double + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -375,19 +359,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 1 Stage: Stage-10 Map Reduce @@ -438,22 +418,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src Index: ql/src/test/results/clientpositive/auto_join14.q.out 
=================================================================== --- ql/src/test/results/clientpositive/auto_join14.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join14.q.out (working copy) @@ -38,19 +38,15 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-4 Map Reduce @@ -62,44 +58,40 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {key} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col0, _col5 - Position of Big Table: 0 + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col5 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col5 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -130,33 +122,7 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 1 - - Stage: Stage-5 - Map Reduce - Alias -> Map Operator Tree: - srcpart - TableScan - alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 + HashTable Sink Operator condition expressions: 0 {key} 1 {value} @@ -164,30 +130,48 @@ keys: 0 [Column[key]] 1 [Column[key]] - outputColumnNames: _col0, _col5 Position of Big Table: 1 + + Stage: Stage-5 + Map Reduce + Alias -> Map Operator Tree: + srcpart + TableScan + alias: srcpart + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col5 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: 
_col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col5 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -201,29 +185,6 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - srcpart - TableScan - alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean Reduce Output Operator key expressions: expr: key @@ -232,10 +193,25 @@ Map-reduce partition columns: expr: key type: string - tag: 1 + tag: 0 value expressions: - expr: value + expr: key type: string + srcpart + TableScan + alias: srcpart + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: value + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join16.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join16.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join16.q.out (working copy) @@ -65,56 +65,44 @@ predicate: expr: ((key > 10) and (key > 20)) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 > 20) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0], Column[_col1]] - 1 [Column[key], Column[value]] - outputColumnNames: _col0, _col3 - Position of Big Table: 0 - Filter Operator - predicate: - expr: (_col3 < 200) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0], Column[_col1]] + 1 [Column[key], 
Column[value]] + outputColumnNames: _col0, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col3 + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -161,30 +149,22 @@ predicate: expr: ((key > 10) and (key > 20)) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 > 20) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0], Column[_col1]] - 1 [Column[key], Column[value]] - Position of Big Table: 1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0], Column[_col1]] + 1 [Column[key], Column[value]] + Position of Big Table: 1 Stage: Stage-5 Map Reduce @@ -208,29 +188,25 @@ 1 [Column[key], Column[value]] outputColumnNames: _col0, _col3 Position of Big Table: 1 - Filter Operator - predicate: - expr: (_col3 < 200) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col3 + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -244,37 +220,29 @@ predicate: expr: ((key > 10) and (key > 20)) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 > 20) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: string + tag: 0 + value expressions: + expr: _col0 
+ type: string tab TableScan alias: tab @@ -307,29 +275,25 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col3 - Filter Operator - predicate: - expr: (_col3 < 200) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col3 - Group By Operator - aggregations: - expr: sum(hash(_col0,_col3)) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col3 + Group By Operator + aggregations: + expr: sum(hash(_col0,_col3)) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/auto_join19.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join19.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join19.q.out (working copy) @@ -68,32 +68,28 @@ 1 [Column[key]] outputColumnNames: _col0, _col2, _col3, _col7 Position of Big Table: 0 - Filter Operator - predicate: - expr: (((_col2 = '2008-04-08') or (_col2 = '2008-04-09')) and ((_col3 = '12') or (_col3 = '11'))) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -148,32 +144,28 @@ 1 [Column[key]] outputColumnNames: _col0, _col2, _col3, _col7 Position of Big Table: 1 - Filter Operator - predicate: - expr: (((_col2 = '2008-04-08') or (_col2 = '2008-04-09')) and ((_col3 = '12') or (_col3 = '11'))) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -223,32 +215,28 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col2, _col3, _col7 - Filter Operator - predicate: - expr: (((_col2 = '2008-04-08') or (_col2 = '2008-04-09')) and ((_col3 = '12') or (_col3 = '11'))) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) Index: ql/src/test/results/clientpositive/auto_join20.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join20.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join20.q.out (working copy) @@ -47,25 +47,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 a:src2 TableScan alias: src2 @@ -236,24 +232,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string a:src2 TableScan alias: src2 @@ -398,25 +390,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition 
expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 a:src2 TableScan alias: src2 @@ -424,25 +412,21 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 20)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 20)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 Stage: Stage-6 Map Reduce @@ -595,24 +579,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string a:src2 TableScan alias: src2 @@ -620,24 +600,20 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string a:src3 TableScan alias: src3 Index: ql/src/test/results/clientpositive/auto_join21.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join21.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join21.q.out (working copy) @@ -54,25 +54,21 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 {(key < 10)} - 1 - 2 {(key < 10)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 {(key < 10)} + 1 + 2 {(key < 10)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 Stage: Stage-5 Map Reduce @@ -192,24 +188,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: 
boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 Index: ql/src/test/results/clientpositive/auto_join23.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join23.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join23.q.out (working copy) @@ -67,27 +67,23 @@ 1 [] outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -171,27 +167,23 @@ 1 [] outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 1 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -237,27 +229,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + 
expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/auto_join27.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join27.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join27.q.out (working copy) @@ -119,19 +119,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - HashTable Sink Operator - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-6 Map Reduce @@ -268,34 +264,30 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 - 1 - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 1 - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 + 1 + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 1 + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -336,19 +328,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join28.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join28.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join28.q.out (working copy) @@ -44,24 +44,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string 
+ sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -209,24 +205,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -234,24 +226,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 2 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + expr: value + type: string Reduce Operator Tree: Join Operator condition map: @@ -365,24 +353,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -407,24 +391,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 2 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + expr: value + type: string Reduce Operator Tree: Join Operator condition map: @@ -538,24 +518,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 Index: ql/src/test/results/clientpositive/auto_join29.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join29.q.out (revision 1145463) +++ 
ql/src/test/results/clientpositive/auto_join29.q.out (working copy) @@ -54,25 +54,21 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 {(key < 10)} - 1 - 2 {(key < 10)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 {(key < 10)} + 1 + 2 {(key < 10)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 Stage: Stage-5 Map Reduce @@ -192,24 +188,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -2921,25 +2913,21 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 {(key < 10)} - 1 - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 {(key < 10)} + 1 + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 src3 TableScan alias: src3 @@ -2947,25 +2935,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 {(key < 10)} - 1 - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 {(key < 10)} + 1 + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 Stage: Stage-5 Map Reduce @@ -3085,24 +3069,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -3110,24 +3090,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - 
key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 2 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + expr: value + type: string Reduce Operator Tree: Join Operator condition map: @@ -4244,25 +4220,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 {(key > 10)} - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 {(key > 10)} + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 src3 TableScan alias: src3 @@ -4270,25 +4242,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 {(key > 10)} - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 {(key > 10)} + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 Stage: Stage-5 Map Reduce @@ -4391,24 +4359,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -4433,24 +4397,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 2 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + expr: value + type: string Reduce Operator Tree: Join Operator condition map: @@ -5087,25 +5047,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 {(key > 10)} - 2 {(key < 10)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable 
Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 {(key > 10)} + 2 {(key < 10)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 src2 TableScan alias: src2 @@ -5226,24 +5182,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -6432,21 +6384,17 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 src3 TableScan alias: src3 @@ -6454,21 +6402,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 Stage: Stage-5 Map Reduce @@ -6480,46 +6424,42 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Left Outer Join1 to 2 - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Position of Big Table: 0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + Left Outer Join1 to 2 + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col8 + 
type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -6582,21 +6522,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 src3 TableScan alias: src3 @@ -6604,21 +6540,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 Stage: Stage-6 Map Reduce @@ -6630,46 +6562,42 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Left Outer Join1 to 2 - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Position of Big Table: 1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + Left Outer Join1 to 2 + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -6683,24 +6611,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - 
Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -6708,24 +6632,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -6733,24 +6653,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 2 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + expr: value + type: string Reduce Operator Tree: Join Operator condition map: @@ -6835,25 +6751,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 10)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 10)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 src2 TableScan alias: src2 @@ -6861,25 +6773,21 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 - 2 {(key < 10)} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 + 2 {(key < 10)} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 Stage: Stage-5 Map Reduce @@ -6982,24 +6890,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string 
+ tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -7007,24 +6911,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -7630,25 +7530,21 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 {(key < 10)} - 1 - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 {(key < 10)} + 1 + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 src3 TableScan alias: src3 @@ -7656,25 +7552,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 {(key < 10)} - 1 - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 {(key < 10)} + 1 + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 Stage: Stage-5 Map Reduce @@ -7794,24 +7686,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -7819,24 +7707,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 2 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + expr: value + type: string Reduce Operator Tree: Join Operator condition map: @@ -7927,25 +7811,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition 
expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 {(key > 10)} - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 {(key > 10)} + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 src3 TableScan alias: src3 @@ -7953,25 +7833,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 {(key > 10)} - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 {(key > 10)} + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 Stage: Stage-5 Map Reduce @@ -8081,25 +7957,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 {(key > 10)} - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 {(key > 10)} + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 src2 TableScan alias: src2 @@ -8129,50 +8001,46 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Map Join Operator - condition map: - Right Outer Join0 to 1 - Inner Join 1 to 2 - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - filter predicates: - 0 - 1 {(key > 10)} - 2 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Position of Big Table: 2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Right Outer Join0 to 1 + Inner Join 1 to 2 + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + filter predicates: + 0 + 1 {(key > 10)} + 2 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output 
Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -8186,24 +8054,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -8228,24 +8092,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 2 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + expr: value + type: string Reduce Operator Tree: Join Operator condition map: @@ -8396,21 +8256,17 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 src3 TableScan alias: src3 @@ -8418,21 +8274,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 0 Stage: Stage-5 Map Reduce @@ -8444,46 +8296,42 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 1 to 2 - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Position of Big Table: 0 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 1 to 2 + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -8546,21 +8394,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 src3 TableScan alias: src3 @@ -8568,21 +8412,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 1 Stage: Stage-6 Map Reduce @@ -8594,46 +8434,42 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 1 to 2 - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Position of Big Table: 1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 1 to 2 + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + 
expr: _col5 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -8654,21 +8490,17 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 src2 TableScan alias: src2 @@ -8676,21 +8508,17 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - Position of Big Table: 2 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + Position of Big Table: 2 Stage: Stage-7 Map Reduce @@ -8702,46 +8530,42 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 1 to 2 - condition expressions: - 0 {key} {value} - 1 {key} {value} - 2 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Position of Big Table: 2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 1 to 2 + condition expressions: + 0 {key} {value} + 1 {key} {value} + 2 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -8755,24 +8579,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: 
key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -8780,24 +8600,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -8805,24 +8621,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 2 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + expr: value + type: string Reduce Operator Tree: Join Operator condition map: @@ -8860,6 +8672,7 @@ Fetch Operator limit: -1 + PREHOOK: query: SELECT * FROM src src1 JOIN src src2 ON (src1.key = src2.key AND src1.key < 10 AND src2.key > 10) JOIN src src3 ON (src2.key = src3.key AND src3.key < 10) SORT BY src1.key, src1.value, src2.key, src2.value, src3.key, src3.value PREHOOK: type: QUERY PREHOOK: Input: default@src Index: ql/src/test/results/clientpositive/auto_join4.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join4.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join4.q.out (working copy) @@ -62,26 +62,22 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-4 Map Reduce @@ -93,29 +89,36 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Left Outer Join0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - 
handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 0 Select Operator expressions: expr: _col0 @@ -129,34 +132,23 @@ outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -183,31 +175,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -215,31 +203,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value 
expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join5.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join5.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join5.q.out (working copy) @@ -62,26 +62,22 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 1 Stage: Stage-4 Map Reduce @@ -93,29 +89,36 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Right Outer Join0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Right Outer Join0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 1 Select Operator expressions: expr: _col0 @@ -129,34 +132,23 @@ outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -183,31 +175,27 @@ predicate: expr: ((key 
> 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -215,31 +203,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join6.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join6.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join6.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 
- type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join7.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join7.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join7.q.out (working copy) @@ -62,31 +62,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -94,31 +90,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:c:src3 TableScan alias: src3 @@ -126,31 +118,27 @@ predicate: expr: ((key > 20) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 20) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 2 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/auto_join8.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join8.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join8.q.out (working copy) @@ -62,26 +62,22 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - 
predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-4 Map Reduce @@ -93,29 +89,29 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Map Join Operator - condition map: - Left Outer Join0 to 1 - condition expressions: - 0 {_col0} {_col1} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[_col0]] - outputColumnNames: _col0, _col1, _col2, _col3 - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Map Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {_col0} {_col1} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[_col0]] + outputColumnNames: _col0, _col1, _col2, _col3 + Position of Big Table: 0 + Filter Operator + predicate: + expr: (_col2 is null and _col0 is not null) + type: boolean Select Operator expressions: expr: _col0 @@ -127,40 +123,36 @@ expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Filter Operator - predicate: - expr: (_col2 is null and _col0 is not null) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string - expr: _col2 - type: string + expr: UDFToInteger(_col2) + type: int expr: _col3 type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: UDFToInteger(_col2) - type: int - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -187,31 +179,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: 
value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -219,31 +207,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -253,21 +237,21 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Filter Operator - predicate: - expr: (_col2 is null and _col0 is not null) - type: boolean + Filter Operator + predicate: + expr: (_col2 is null and _col0 is not null) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: expr: _col0 Index: ql/src/test/results/clientpositive/auto_join9.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join9.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/auto_join9.q.out (working copy) @@ -66,32 +66,28 @@ 1 [Column[key]] outputColumnNames: _col0, _col2, _col3, _col7 Position of Big Table: 0 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: 
org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -146,32 +142,28 @@ 1 [Column[key]] outputColumnNames: _col0, _col2, _col3, _col7 Position of Big Table: 1 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Local Work: Map Reduce Local Work @@ -221,32 +213,28 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col2, _col3, _col7 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 PREHOOK: query: FROM srcpart src1 JOIN src src2 ON (src1.key = src2.key) Index: ql/src/test/results/clientpositive/bucket2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/bucket2.q.out (working copy) @@ -190,28 +190,24 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string 
- outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/bucket3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/bucket3.q.out (working copy) @@ -211,36 +211,28 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Filter Operator - predicate: - expr: (ds = '1') - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 + type: int + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col2 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/bucket4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket4.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/bucket4.q.out (working copy) @@ -195,23 +195,19 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/bucket_groupby.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_groupby.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/bucket_groupby.q.out (working copy) @@ -44,36 +44,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '100') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - 
outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -183,36 +179,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -291,36 +283,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean - Select Operator - expressions: - expr: key - type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: length(key) + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: + expr: length(key) + type: int + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -390,36 +378,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean - Select Operator - expressions: - expr: key - type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: abs(length(key)) + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: + expr: abs(length(key)) + type: int + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: -1 + value expressions: + 
expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -491,42 +475,38 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + expr: 3 + type: int + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: 3 + expr: _col1 type: int - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: int - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: int - tag: -1 - value expressions: - expr: _col2 - type: bigint + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: int + tag: -1 + value expressions: + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -607,41 +587,37 @@ subq:clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '101') - type: boolean + Select Operator + expressions: + expr: value + type: string + outputColumnNames: _col0 Select Operator expressions: - expr: value + expr: _col0 type: string outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col0 type: string - outputColumnNames: _col0 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1250,36 +1226,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '102') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1360,36 +1332,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - 
predicate: - expr: (ds = '102') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: value type: string - outputColumnNames: value - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1470,44 +1438,40 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '102') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string expr: value type: string - outputColumnNames: key, value - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1637,36 +1601,32 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '103') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: true + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: true - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -1751,44 +1711,40 @@ clustergroupby TableScan alias: clustergroupby - Filter Operator - predicate: - expr: (ds = '103') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + outputColumnNames: 
value, key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: value type: string expr: key type: string - outputColumnNames: value, key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: key + expr: _col1 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy) @@ -162,46 +162,41 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-43-58_298_9037696460370256238/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-43-58_298_9037696460370256238/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1306979038 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-32-48_276_7157048291488416942/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-32-48_276_7157048291488416942/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + 
properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310373168 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -593,51 +588,46 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-44-28_997_7507478881890153288/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-44-28_997_7507478881890153288/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 - transient_lastDdlTime 1306979058 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-33-14_326_2924976190429141214/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-33-14_326_2924976190429141214/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name 
default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 464 + rawDataSize 8519 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 8983 + transient_lastDdlTime 1310373185 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ -114,20 +114,15 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Bucket Mapjoin Context: Alias Bucket Base File Name Mapping: b {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt]} @@ -566,23 +561,27 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col1, _col5 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string outputColumnNames: _col0, _col1, _col5 - Position of Big Table: 1 Select Operator expressions: expr: _col0 @@ -591,47 +590,38 @@ type: string expr: _col5 type: string - outputColumnNames: _col0, _col1, _col5 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-45-37_439_7794471196064019055/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-45-37_439_7794471196064019055/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 0 - rawDataSize 0 - serialization.ddl struct bucketmapjoin_tmp_result 
{ string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 0 - transient_lastDdlTime 1306979126 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-34-10_535_3760851253870163797/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-34-10_535_3760851253870163797/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 0 + rawDataSize 0 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 + transient_lastDdlTime 1310373241 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -114,20 +114,15 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Bucket Mapjoin Context: Alias Bucket Base File Name Mapping: b {srcbucket22.txt=[srcbucket20.txt, srcbucket22.txt], srcbucket23.txt=[srcbucket21.txt, srcbucket23.txt]} @@ -144,23 +139,27 @@ TableScan alias: a GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col1, _col6 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col6 + type: string outputColumnNames: _col0, _col1, _col6 - Position of Big Table: 0 Select Operator expressions: 
expr: _col0 @@ -169,42 +168,33 @@ type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-46-14_989_6212335898656818660/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-46-14_989_6212335898656818660/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1306979174 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-34-40_179_2078859592920140854/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-34-40_179_2078859592920140854/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310373280 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -550,20 +540,15 @@ TableScan alias: a GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 1 Bucket Mapjoin Context: Alias Bucket Base File Name Mapping: a {srcbucket20.txt=[srcbucket22.txt], srcbucket21.txt=[srcbucket23.txt], 
srcbucket22.txt=[srcbucket22.txt], srcbucket23.txt=[srcbucket23.txt]} @@ -582,23 +567,27 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {key} {value} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {key} {value} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col0, _col1, _col6 + Position of Big Table: 1 + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col6 + type: string outputColumnNames: _col0, _col1, _col6 - Position of Big Table: 1 Select Operator expressions: expr: _col0 @@ -607,47 +596,38 @@ type: string expr: _col6 type: string - outputColumnNames: _col0, _col1, _col6 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col6 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-46-45_810_5134529884228638664/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_18-46-45_810_5134529884228638664/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 564 - rawDataSize 10503 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11067 - transient_lastDdlTime 1306979195 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-35-06_760_1506759328569853293/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_01-35-06_760_1506759328569853293/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 564 + rawDataSize 10503 + serialization.ddl struct bucketmapjoin_tmp_result { string key, 
string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11067 + transient_lastDdlTime 1310373297 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy) @@ -120,46 +120,41 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/scratchdir/hive_2011-03-20_22-39-58_430_2374140411482084338/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/scratchdir/hive_2011-03-20_22-39-58_430_2374140411482084338/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/Users/krishnak/Projects/hdp/sources/hive-git-apache/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1300685998 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-03-23_03-48-35_145_7336188183229568049/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-03-23_03-48-35_145_7336188183229568049/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 
1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300877315 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/case_sensitivity.q.out =================================================================== --- ql/src/test/results/clientpositive/case_sensitivity.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/case_sensitivity.q.out (working copy) @@ -33,25 +33,21 @@ predicate: expr: (lint[0] > 0) type: boolean - Filter Operator - predicate: - expr: (lint[0] > 0) - type: boolean - Select Operator - expressions: - expr: lint[1] - type: int - expr: lintstring[0].MYSTRING - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: lint[1] + type: int + expr: lintstring[0].MYSTRING + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/cast1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/cast1.q.out (working copy) @@ -31,35 +31,31 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: (3 + 2) - type: int - expr: (3.0 + 2) - type: double - expr: (3 + 2.0) - type: double - expr: (3.0 + 2.0) - type: double - expr: ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0))) - type: int - expr: UDFToBoolean(1) - type: boolean - expr: UDFToInteger(true) - type: int - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: (3 + 2) + type: int + expr: (3.0 + 2) + type: double + expr: (3 + 2.0) + type: double + expr: (3.0 + 2.0) + type: double + expr: ((3 + UDFToInteger(2.0)) + UDFToInteger(UDFToShort(0))) + type: int + expr: UDFToBoolean(1) + type: boolean + expr: UDFToInteger(true) + type: int + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/cluster.q.out =================================================================== --- 
ql/src/test/results/clientpositive/cluster.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/cluster.q.out (working copy) @@ -22,31 +22,27 @@ predicate: expr: (key = 10) type: boolean - Filter Operator - predicate: - expr: (key = 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -94,31 +90,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -166,31 +158,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -238,31 +226,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + 
type: string Reduce Operator Tree: Extract File Output Operator @@ -310,31 +294,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -382,31 +362,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -454,31 +430,27 @@ predicate: expr: (key = 20) type: boolean - Filter Operator - predicate: - expr: (key = 20) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -640,25 +612,21 @@ 1 {VALUE._col0} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -768,27 +736,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -900,27 +864,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -1030,25 +990,21 @@ 1 {VALUE._col0} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4 - Filter Operator - predicate: - expr: (_col0 = 20) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce @@ -1129,39 +1085,35 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: 
_col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string null-subquery2:unioninput-subquery2:src TableScan alias: src @@ -1169,39 +1121,35 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/combine2.q.out =================================================================== --- ql/src/test/results/clientpositive/combine2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/combine2.q.out (working copy) @@ -86,28 +86,24 @@ combine2 TableScan alias: combine2 - Filter Operator - predicate: - expr: value is not null - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -193,24 +189,19 @@ TableScan alias: combine2 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: value is not null - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/combine2/value=2010-04-21 09%3A45%3A00 [combine2] @@ 
-715,36 +706,32 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: ds + type: string + outputColumnNames: ds + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: ds type: string - outputColumnNames: ds - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: ds + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/create_view.q.out =================================================================== --- ql/src/test/results/clientpositive/create_view.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/create_view.q.out (working copy) @@ -194,23 +194,19 @@ expr: value type: string outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 = 18) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out =================================================================== --- ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (working copy) @@ -190,28 +190,24 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/filter_join_breaktask.q.out =================================================================== --- ql/src/test/results/clientpositive/filter_join_breaktask.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/filter_join_breaktask.q.out (working copy) @@ 
-48,28 +48,18 @@ predicate: expr: key is not null type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: key is not null - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 0 - value expressions: - expr: key - type: int + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 0 + value expressions: + expr: key + type: int m TableScan alias: m @@ -79,25 +69,20 @@ predicate: expr: (value is not null and (value <> '')) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 1 - value expressions: - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 1 + value expressions: + expr: value + type: string + expr: ds + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 [f, m] @@ -161,36 +146,21 @@ 1 {VALUE._col1} {VALUE._col2} handleSkewJoin: false outputColumnNames: _col0, _col6, _col7 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col7 = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: _col6 is not null - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 <> '') - type: boolean - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/tomasz/hive_2011-06-01_19-00-51_695_3409169030364207424/-mr-10002 - NumFilesPerFileSink: 1 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - properties: - columns _col0,_col6 - columns.types int,string - escape.delim \ - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/tmp/amarsri/hive_2011-07-11_01-46-22_446_957936931018191228/-mr-10002 + NumFilesPerFileSink: 1 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + properties: + columns _col0,_col6,_col7 + columns.types int,string,string + escape.delim \ + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-2 Map Reduce @@ -212,23 +182,18 @@ TableScan alias: g GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: value - type: string - sort order: + - Map-reduce partition columns: - expr: value - type: string - tag: 1 - value expressions: - expr: value - type: string + Reduce Output Operator + key expressions: + expr: value + type: string + sort order: + + Map-reduce partition columns: + expr: value + type: string + tag: 1 + value expressions: + expr: value + type: string Needs Tagging: true Path -> Alias: file:/tmp/tomasz/hive_2011-06-01_19-00-51_695_3409169030364207424/-mr-10002 [$INTNAME] @@ -240,15 +205,15 @@ input 
format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: - columns _col0,_col6 - columns.types int,string + columns _col0,_col6,_col7 + columns.types int,string,string escape.delim \ input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat properties: - columns _col0,_col6 - columns.types int,string + columns _col0,_col6,_col7 + columns.types int,string,string escape.delim \ pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/filter_join_breaktask/ds=2008-04-08 Partition Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy) @@ -33,46 +33,41 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: count(DISTINCT substr(value, 5)) + expr: sum(substr(value, 5)) + bucketGroup: false + keys: + expr: substr(key, 1, 1) type: string - expr: value + expr: substr(value, 5) type: string - outputColumnNames: key, value - Group By Operator - aggregations: - expr: count(DISTINCT substr(value, 5)) - expr: sum(substr(value, 5)) - bucketGroup: false - keys: - expr: substr(key, 1, 1) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: substr(value, 5) + expr: _col1 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint - expr: _col3 - type: double + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint + expr: _col3 + type: double Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src] Index: ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (working copy) @@ -33,56 +33,51 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: count(DISTINCT substr(value, 5)) + expr: sum(substr(value, 5)) + expr: sum(DISTINCT substr(value, 5)) + expr: count(DISTINCT value) + bucketGroup: false + keys: + expr: substr(key, 1, 1) type: string + expr: substr(value, 5) + type: string expr: value type: string - outputColumnNames: key, value - Group By Operator - aggregations: - 
expr: count(DISTINCT substr(value, 5)) - expr: sum(substr(value, 5)) - expr: sum(DISTINCT substr(value, 5)) - expr: count(DISTINCT value) - bucketGroup: false - keys: - expr: substr(key, 1, 1) + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: substr(value, 5) + expr: _col1 type: string - expr: value + expr: _col2 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - sort order: +++ - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col3 - type: bigint - expr: _col4 - type: double - expr: _col5 - type: double - expr: _col6 - type: bigint + sort order: +++ + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col3 + type: bigint + expr: _col4 + type: double + expr: _col5 + type: double + expr: _col6 + type: bigint Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src] Index: ql/src/test/results/clientpositive/groupby_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/groupby_ppr.q.out (working copy) @@ -33,29 +33,24 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Reduce Output Operator + key expressions: + expr: substr(key, 1, 1) type: string - expr: value + expr: substr(value, 5) type: string - outputColumnNames: key, value - Reduce Output Operator - key expressions: - expr: substr(key, 1, 1) - type: string - expr: substr(value, 5) - type: string - sort order: ++ - Map-reduce partition columns: - expr: substr(key, 1, 1) - type: string - tag: -1 + sort order: ++ + Map-reduce partition columns: + expr: substr(key, 1, 1) + type: string + tag: -1 Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src] Index: ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (working copy) @@ -33,31 +33,26 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Reduce Output Operator + key expressions: + expr: substr(key, 1, 1) type: string + expr: substr(value, 5) + type: string expr: value type: string - outputColumnNames: key, value - Reduce Output Operator - key expressions: - expr: substr(key, 1, 1) - type: string - expr: substr(value, 5) - type: string - expr: value - type: string - sort order: +++ - Map-reduce partition columns: - expr: substr(key, 1, 1) - type: 
string - tag: -1 + sort order: +++ + Map-reduce partition columns: + expr: substr(key, 1, 1) + type: string + tag: -1 Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [src] Index: ql/src/test/results/clientpositive/having.q.out =================================================================== --- ql/src/test/results/clientpositive/having.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/having.q.out (working copy) @@ -153,23 +153,19 @@ type: string mode: mergepartial outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 <> 302) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -798,38 +794,34 @@ predicate: expr: (key > 300) type: boolean - Filter Operator - predicate: - expr: (key > 300) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: key, value + Group By Operator + aggregations: + expr: max(value) + bucketGroup: false + keys: expr: key type: string - expr: value - type: string - outputColumnNames: key, value - Group By Operator - aggregations: - expr: max(value) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: string Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/implicit_cast1.q.out =================================================================== --- ql/src/test/results/clientpositive/implicit_cast1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/implicit_cast1.q.out (working copy) @@ -31,23 +31,19 @@ predicate: expr: (a <> 0) type: boolean - Filter Operator - predicate: - expr: (a <> 0) - type: boolean - Select Operator - expressions: - expr: a - type: bigint - expr: b - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: a + type: bigint + expr: b + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 
Fetch Operator Index: ql/src/test/results/clientpositive/index_auto.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/index_auto.q.out (working copy) @@ -84,28 +84,24 @@ predicate: expr: ((key > 80) and (key < 100)) type: boolean - Filter Operator - predicate: - expr: ((key > 80) and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -185,23 +181,19 @@ predicate: expr: ((key > 80) and (key < 100)) type: boolean - Filter Operator - predicate: - expr: ((key > 80) and (key < 100)) - type: boolean - Select Operator - expressions: - expr: _bucketname - type: string - expr: _offsets - type: array - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offsets + type: array + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-6 Conditional Operator @@ -231,28 +223,24 @@ predicate: expr: ((key > 80) and (key < 100)) type: boolean - Filter Operator - predicate: - expr: ((key > 80) and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/index_auto_file_format.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_file_format.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/index_auto_file_format.q.out (working copy) @@ -48,23 +48,19 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: _bucketname - type: string - expr: _offsets - type: array - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: 
_bucketname + type: string + expr: _offsets + type: array + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-6 Conditional Operator @@ -94,28 +90,24 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -188,23 +180,19 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: _bucketname - type: string - expr: _offsets - type: array - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offsets + type: array + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-6 Conditional Operator @@ -234,28 +222,24 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/index_auto_multiple.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_multiple.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/index_auto_multiple.q.out (working copy) @@ -71,23 +71,19 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: _bucketname - type: string - expr: _offsets - type: array - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offsets + type: array + outputColumnNames: 
_col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-6 Conditional Operator @@ -117,28 +113,24 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/index_auto_partitioned.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_partitioned.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/index_auto_partitioned.q.out (working copy) @@ -60,23 +60,19 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: ((key = 86) and (ds = '2008-04-09')) - type: boolean - Select Operator - expressions: - expr: _bucketname - type: string - expr: _offsets - type: array - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offsets + type: array + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-6 Conditional Operator @@ -106,28 +102,24 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: ((key = 86) and (ds = '2008-04-09')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/index_auto_unused.q.out =================================================================== --- ql/src/test/results/clientpositive/index_auto_unused.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/index_auto_unused.q.out (working copy) @@ -47,28 +47,24 @@ predicate: expr: ((key > 80) and (key < 100)) type: boolean - Filter Operator - predicate: - expr: ((key > 80) and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + 
outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -143,28 +139,24 @@ predicate: expr: ((key > 80) and (key < 100)) type: boolean - Filter Operator - predicate: - expr: ((key > 80) and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -239,28 +231,24 @@ predicate: expr: ((key < 10) or (key > 480)) type: boolean - Filter Operator - predicate: - expr: ((key < 10) or (key > 480)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -379,28 +367,24 @@ predicate: expr: ((key > 80) and (key < 100)) type: boolean - Filter Operator - predicate: - expr: ((key > 80) and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator @@ -521,36 +505,32 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (((ds = '2008-04-09') and (hr = 12)) and (key < 10)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - expr: ds + expr: _col1 type: string - expr: hr + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - 
key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col3 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/index_bitmap3.q.out =================================================================== --- ql/src/test/results/clientpositive/index_bitmap3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/index_bitmap3.q.out (working copy) @@ -1147,39 +1147,35 @@ predicate: expr: (key = 0) type: boolean - Filter Operator - predicate: - expr: (key = 0) - type: boolean - Select Operator - expressions: - expr: _bucketname + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offset + type: bigint + expr: _bitmaps + type: array + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: _offset + expr: _col1 type: bigint - expr: _bitmaps + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: bigint + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + expr: _col2 type: array - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: bigint - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - expr: _col2 - type: array b:default__src_src2_index__ TableScan alias: default__src_src2_index__ @@ -1187,35 +1183,31 @@ predicate: expr: (value = 'val_0') type: boolean - Filter Operator - predicate: - expr: (value = 'val_0') - type: boolean - Select Operator - expressions: - expr: _bucketname + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offset + type: bigint + expr: _bitmaps + type: array + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: _offset + expr: _col1 type: bigint - expr: _bitmaps + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: bigint + tag: 1 + value expressions: + expr: _col2 type: array - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: bigint - tag: 1 - value expressions: - expr: _col2 - type: array Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/index_bitmap_auto.q.out =================================================================== --- ql/src/test/results/clientpositive/index_bitmap_auto.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/index_bitmap_auto.q.out (working copy) @@ -1166,39 +1166,35 @@ predicate: expr: (key = 0) type: boolean - Filter Operator - predicate: - expr: (key = 0) - type: boolean - Select Operator - expressions: - expr: _bucketname + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offset + type: bigint + expr: _bitmaps + type: array + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: _offset + expr: _col1 type: bigint - expr: _bitmaps + sort order: ++ + Map-reduce 
partition columns: + expr: _col0 + type: string + expr: _col1 + type: bigint + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + expr: _col2 type: array - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: bigint - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - expr: _col2 - type: array b:default__src_src2_index__ TableScan alias: default__src_src2_index__ @@ -1206,35 +1202,31 @@ predicate: expr: (value = 'val_0') type: boolean - Filter Operator - predicate: - expr: (value = 'val_0') - type: boolean - Select Operator - expressions: - expr: _bucketname + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offset + type: bigint + expr: _bitmaps + type: array + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: _offset + expr: _col1 type: bigint - expr: _bitmaps + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: bigint + tag: 1 + value expressions: + expr: _col2 type: array - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: bigint - tag: 1 - value expressions: - expr: _col2 - type: array Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out =================================================================== --- ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/index_bitmap_auto_partitioned.q.out (working copy) @@ -59,58 +59,50 @@ predicate: expr: ((key = 86) and (not EWAH_BITMAP_EMPTY(_bitmaps))) type: boolean - Filter Operator - predicate: - expr: ((key = 86) and (ds = '2008-04-09')) - type: boolean + Select Operator + expressions: + expr: _bucketname + type: string + expr: _offset + type: bigint + expr: _bitmaps + type: array + outputColumnNames: _col1, _col2, _col3 Select Operator expressions: - expr: _bucketname + expr: _col1 type: string - expr: _offset + expr: _col2 type: bigint - expr: _bitmaps - type: array - outputColumnNames: _col1, _col2, _col3 - Filter Operator - predicate: - expr: (not EWAH_BITMAP_EMPTY(_col3)) - type: boolean - Select Operator - expressions: - expr: _col1 + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 + Group By Operator + aggregations: + expr: collect_set(_col1) + bucketGroup: false + keys: + expr: _col0 type: string - expr: _col2 - type: bigint + mode: hash outputColumnNames: _col0, _col1 - Select Operator - expressions: + Reduce Output Operator + key expressions: expr: _col0 type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - Group By Operator - aggregations: - expr: collect_set(_col1) - bucketGroup: false - keys: - expr: _col0 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - 
Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: array + type: array Reduce Operator Tree: Group By Operator aggregations: @@ -154,28 +146,24 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: ((key = 86) and (ds = '2008-04-09')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/input11.q.out =================================================================== --- ql/src/test/results/clientpositive/input11.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input11.q.out (working copy) @@ -33,32 +33,28 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input11_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input11_limit.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input11_limit.q.out (working copy) @@ -30,26 +30,22 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Limit + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract Limit Index: ql/src/test/results/clientpositive/input14.q.out =================================================================== --- ql/src/test/results/clientpositive/input14.q.out (revision 1145463) 
+++ ql/src/test/results/clientpositive/input14.q.out (working copy) @@ -48,48 +48,48 @@ output info: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 - type: string + expr: UDFToInteger(_col0) + type: int expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/input18.q.out =================================================================== --- ql/src/test/results/clientpositive/input18.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input18.q.out (working copy) @@ -52,48 +52,48 @@ output info: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Extract - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: regexp_replace(_col1, ' ', '+') + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: regexp_replace(_col1, ' ', '+') - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: 
org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/input23.q.out =================================================================== --- ql/src/test/results/clientpositive/input23.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input23.q.out (working copy) @@ -106,47 +106,42 @@ 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7, _col8, _col9 - Filter Operator - isSamplingPred: false - predicate: - expr: ((((_col2 = '2008-04-08') and (_col3 = '11')) and (_col8 = '2008-04-08')) and (_col9 = '14')) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col6 - type: string - expr: _col7 - type: string - expr: _col8 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_15-54-28_256_717236059530733661/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_15-54-28_256_717236059530733661/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 - columns.types string:string:string:string:string:string:string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_09-04-29_865_6849719999466698028/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_09-04-29_865_6849719999466698028/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7 + columns.types string:string:string:string:string:string:string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/input24.q.out =================================================================== --- ql/src/test/results/clientpositive/input24.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input24.q.out (working copy) @@ -30,23 +30,19 @@ x TableScan 
alias: x - Filter Operator - predicate: - expr: (d = '2009-01-01') - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/input25.q.out =================================================================== --- ql/src/test/results/clientpositive/input25.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input25.q.out (working copy) @@ -47,30 +47,26 @@ null-subquery1:subq-subquery1:x TableScan alias: x - Filter Operator - predicate: - expr: (d = '2009-01-01') - type: boolean - Select Operator - expressions: - expr: a - type: int - expr: b - type: int - expr: d - type: string - outputColumnNames: _col0, _col1, _col2 - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: string + Select Operator + expressions: + expr: a + type: int + expr: b + type: int + expr: d + type: string + outputColumnNames: _col0, _col1, _col2 + Limit + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: int + expr: _col2 + type: string Reduce Operator Tree: Extract Limit @@ -125,30 +121,26 @@ null-subquery2:subq-subquery2:x TableScan alias: x - Filter Operator - predicate: - expr: (d = '2009-02-02') - type: boolean - Select Operator - expressions: - expr: a - type: int - expr: b - type: int - expr: d - type: string - outputColumnNames: _col0, _col1, _col2 - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: int - expr: _col2 - type: string + Select Operator + expressions: + expr: a + type: int + expr: b + type: int + expr: d + type: string + outputColumnNames: _col0, _col1, _col2 + Limit + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: int + expr: _col2 + type: string Reduce Operator Tree: Extract Limit Index: ql/src/test/results/clientpositive/input26.q.out =================================================================== --- ql/src/test/results/clientpositive/input26.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input26.q.out (working copy) @@ -28,36 +28,32 @@ null-subquery1:subq-subquery1:a TableScan alias: a - Filter Operator - predicate: - expr: ((ds = '2008-04-08') and (hr = '11')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - expr: ds + expr: _col1 type: string - expr: hr + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: 
string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col3 + type: string Reduce Operator Tree: Extract Limit @@ -120,34 +116,30 @@ predicate: expr: ((ds = '2008-04-08') and (hr = '14')) type: boolean - Filter Operator - predicate: - expr: ((ds = '2008-04-08') and (hr = '14')) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Reduce Operator Tree: Extract Limit Index: ql/src/test/results/clientpositive/input2_limit.q.out =================================================================== --- ql/src/test/results/clientpositive/input2_limit.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input2_limit.q.out (working copy) @@ -22,24 +22,20 @@ predicate: expr: (key < 300) type: boolean - Filter Operator - predicate: - expr: (key < 300) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/input31.q.out =================================================================== --- ql/src/test/results/clientpositive/input31.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input31.q.out (working copy) @@ -35,23 +35,19 @@ predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/input39.q.out =================================================================== --- ql/src/test/results/clientpositive/input39.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input39.q.out (working copy) @@ -121,23 +121,19 @@ 1 {VALUE._col2} handleSkewJoin: false outputColumnNames: _col2, _col7 - Filter 
Operator - predicate: - expr: ((_col2 = '1') and (_col7 = '1')) - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/input42.q.out =================================================================== --- ql/src/test/results/clientpositive/input42.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input42.q.out (working copy) @@ -19,39 +19,34 @@ TableScan alias: a GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col3 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 type: string - expr: hr + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col3 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] @@ -1199,39 +1194,34 @@ predicate: expr: (key < 200) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (key < 200)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col3 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 type: string - expr: hr + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col3 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [a] Index: ql/src/test/results/clientpositive/input6.q.out =================================================================== --- ql/src/test/results/clientpositive/input6.q.out (revision 
1145463) +++ ql/src/test/results/clientpositive/input6.q.out (working copy) @@ -33,25 +33,21 @@ predicate: expr: key is null type: boolean - Filter Operator - predicate: - expr: key is null - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input9.q.out =================================================================== --- ql/src/test/results/clientpositive/input9.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input9.q.out (working copy) @@ -33,32 +33,28 @@ predicate: expr: (null = null) type: boolean - Filter Operator - predicate: - expr: (null = null) - type: boolean + Select Operator + expressions: + expr: null + type: string + expr: key + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: null - type: string - expr: key - type: string + expr: _col0 + type: void + expr: UDFToInteger(_col1) + type: int outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: void - expr: UDFToInteger(_col1) - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -35,59 +35,54 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((key < 100) and (ds = '2008-04-08')) and (hr = '12')) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value + expr: _col2 type: string - expr: hr + expr: _col3 type: string - expr: ds - type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string 
- expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_15-59-53_966_4370880936716856186/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_15-59-53_966_4370880936716856186/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,hr,ds - columns.types int:string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382393 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-16-00_167_373354768081382011/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-16-00_167_373354768081382011/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,hr,ds + columns.types int:string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300378560 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [srcpart] Index: ql/src/test/results/clientpositive/input_part5.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part5.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input_part5.q.out (working copy) @@ -33,29 +33,25 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: ((ds = '2008-04-08') and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.tmptable + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.tmptable Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input_part6.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part6.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input_part6.q.out (working copy) @@ -22,28 +22,24 @@ predicate: expr: (ds = ((2008 - 4) - 8)) type: boolean - Filter Operator - predicate: - expr: (ds = ((2008 - 4) - 8)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/input_part7.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part7.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input_part7.q.out (working copy) @@ -34,25 +34,31 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: expr: _col0 type: string expr: _col1 @@ -61,28 +67,17 @@ type: string expr: _col3 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: ++++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + sort order: ++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + 
expr: _col2 + type: string + expr: _col3 + type: string null-subquery2:a-subquery2:y TableScan alias: y @@ -92,25 +87,31 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (key < 100)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: expr: _col0 type: string expr: _col1 @@ -119,28 +120,17 @@ type: string expr: _col3 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: ++++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + sort order: ++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [null-subquery1:a-subquery1:x, null-subquery2:a-subquery2:y] Index: ql/src/test/results/clientpositive/input_part9.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part9.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input_part9.q.out (working copy) @@ -24,39 +24,34 @@ predicate: expr: key is not null type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key is not null and (ds = '2008-04-08')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col3 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 type: string - expr: hr + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col3 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [x] Index: ql/src/test/results/clientpositive/input_testxpath2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input_testxpath2.q.out (working copy) @@ -33,27 +33,23 @@ 
predicate: expr: (lint is not null and (not mstringstring is null)) type: boolean - Filter Operator - predicate: - expr: (lint is not null and (not mstringstring is null)) - type: boolean - Select Operator - expressions: - expr: size(lint) - type: int - expr: size(lintstring) - type: int - expr: size(mstringstring) - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: size(lint) + type: int + expr: size(lintstring) + type: int + expr: size(mstringstring) + type: int + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/input_testxpath4.q.out =================================================================== --- ql/src/test/results/clientpositive/input_testxpath4.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/input_testxpath4.q.out (working copy) @@ -107,23 +107,19 @@ predicate: expr: ((mstringstring['key_9'] is not null and lintstring.myint is not null) and lintstring is not null) type: boolean - Filter Operator - predicate: - expr: ((mstringstring['key_9'] is not null and lintstring.myint is not null) and lintstring is not null) - type: boolean - Select Operator - expressions: - expr: mstringstring['key_9'] - type: string - expr: lintstring.myint - type: array - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: mstringstring['key_9'] + type: string + expr: lintstring.myint + type: array + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/join0.q.out =================================================================== --- ql/src/test/results/clientpositive/join0.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join0.q.out (working copy) @@ -33,25 +33,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: 0 + value expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string src2:src TableScan alias: src @@ -59,25 +55,21 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key 
+ Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + sort order: + tag: 1 + value expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - sort order: - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join11.q.out =================================================================== --- ql/src/test/results/clientpositive/join11.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join11.q.out (working copy) @@ -37,22 +37,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src Index: ql/src/test/results/clientpositive/join12.q.out =================================================================== --- ql/src/test/results/clientpositive/join12.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join12.q.out (working copy) @@ -43,22 +43,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src @@ -93,19 +89,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join13.q.out =================================================================== --- ql/src/test/results/clientpositive/join13.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join13.q.out (working copy) @@ -44,22 +44,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src @@ -130,19 +126,15 @@ expr: key type: 
string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 200) - type: boolean - Reduce Output Operator - key expressions: - expr: UDFToDouble(_col0) - type: double - sort order: + - Map-reduce partition columns: - expr: UDFToDouble(_col0) - type: double - tag: 1 + Reduce Output Operator + key expressions: + expr: UDFToDouble(_col0) + type: double + sort order: + + Map-reduce partition columns: + expr: UDFToDouble(_col0) + type: double + tag: 1 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join14.q.out =================================================================== --- ql/src/test/results/clientpositive/join14.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join14.q.out (working copy) @@ -30,29 +30,6 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - srcpart - TableScan - alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean Reduce Output Operator key expressions: expr: key @@ -61,10 +38,25 @@ Map-reduce partition columns: expr: key type: string - tag: 1 + tag: 0 value expressions: - expr: value + expr: key type: string + srcpart + TableScan + alias: srcpart + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: value + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join16.q.out =================================================================== --- ql/src/test/results/clientpositive/join16.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join16.q.out (working copy) @@ -20,37 +20,29 @@ predicate: expr: ((key > 10) and (key > 20)) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 > 20) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + sort order: ++ + Map-reduce partition columns: + expr: _col0 + type: string + expr: _col1 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string tab TableScan alias: tab @@ -83,23 +75,19 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col3 - Filter Operator - predicate: - expr: (_col3 < 200) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + 
outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/join19.q.out =================================================================== --- ql/src/test/results/clientpositive/join19.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join19.q.out (working copy) @@ -135,27 +135,23 @@ predicate: expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Citation')) type: boolean - Filter Operator - predicate: - expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Citation')) - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string t22:t2 TableScan alias: t2 @@ -163,29 +159,25 @@ predicate: expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') type: boolean - Filter Operator - predicate: - expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + expr: object + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: object + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col1 + type: string t33:t3 TableScan alias: t3 @@ -193,29 +185,25 @@ predicate: expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_from') type: boolean - Filter Operator - predicate: - expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_from') - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + expr: object + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 type: string - expr: object + sort order: + + Map-reduce partition columns: + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: 2 - value expressions: - expr: _col0 - type: string + tag: 2 + value expressions: + expr: _col0 + type: string Reduce Operator Tree: Join Operator condition map: @@ -261,24 +249,20 @@ predicate: expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 
'http://ontos/OntosMiner/Common.English/ontology#Author')) type: boolean - Filter Operator - predicate: - expr: ((predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__INSTANCEOF_REL') and (object = 'http://ontos/OntosMiner/Common.English/ontology#Author')) - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 type: string - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 t55:t5 TableScan alias: t5 @@ -286,29 +270,25 @@ predicate: expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_to') type: boolean - Filter Operator - predicate: - expr: (predicate = 'http://www.ontosearch.com/2007/12/ontosofa-ns#_to') - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + expr: object + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: object + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 - value expressions: - expr: _col1 - type: string + tag: 2 + value expressions: + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -356,29 +336,25 @@ predicate: expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') type: boolean - Filter Operator - predicate: - expr: (predicate = 'http://sofa.semanticweb.org/sofa/v1.0/system#__LABEL_REL') - type: boolean - Select Operator - expressions: - expr: subject + Select Operator + expressions: + expr: subject + type: string + expr: object + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: object + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join20.q.out =================================================================== --- ql/src/test/results/clientpositive/join20.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join20.q.out (working copy) @@ -25,24 +25,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ 
-746,24 +742,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -771,24 +763,20 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 Index: ql/src/test/results/clientpositive/join21.q.out =================================================================== --- ql/src/test/results/clientpositive/join21.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join21.q.out (working copy) @@ -40,24 +40,20 @@ predicate: expr: (key > 10) type: boolean - Filter Operator - predicate: - expr: (key > 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 Index: ql/src/test/results/clientpositive/join23.q.out =================================================================== --- ql/src/test/results/clientpositive/join23.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join23.q.out (working copy) @@ -55,27 +55,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- 
ql/src/test/results/clientpositive/join26.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -76,75 +76,65 @@ TableScan alias: z GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (hr = 11) - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - Inner Join 0 to 2 - condition expressions: - 0 {key} - 1 {value} - 2 {value} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - 2 [Column[key]] - outputColumnNames: _col0, _col5, _col9 - Position of Big Table: 2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col5 - type: string - expr: _col9 - type: string - outputColumnNames: _col0, _col5, _col9 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-03-56_680_8440893894140638044/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-03-56_680_8440893894140638044/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382636 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Map Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {key} + 1 {value} + 2 {value} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + 2 [Column[key]] + outputColumnNames: _col0, _col5, _col9 + Position of Big Table: 2 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col5 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col5, _col9 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col9 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-25-32_631_7413274741403705596/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-25-32_631_7413274741403705596/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300379132 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/join28.q.out =================================================================== --- ql/src/test/results/clientpositive/join28.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join28.q.out (working copy) @@ -57,23 +57,15 @@ z TableScan alias: z - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - predicate: - expr: (hr = 11) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -139,25 +139,15 @@ TableScan alias: z GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (hr = 11) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col5} {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col1]] - 1 [Column[value]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col5} {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col1]] + 1 [Column[value]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -155,28 +155,18 @@ TableScan alias: z GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (hr = 11) - type: boolean - Reduce Output Operator - key expressions: - expr: value - type: string - sort order: + - Map-reduce partition columns: - expr: value - type: string - tag: 1 - value expressions: - expr: value - type: string + Reduce Output Operator + key expressions: + expr: value + type: string + sort order: + + Map-reduce partition columns: + expr: value + type: string + tag: 1 + value expressions: + expr: value + type: string Needs Tagging: true Path -> Alias: file:/tmp/sdong/hive_2011-02-10_16-05-42_624_7730493356150230026/-mr-10002 [file:/tmp/sdong/hive_2011-02-10_16-05-42_624_7730493356150230026/-mr-10002] Index: ql/src/test/results/clientpositive/join34.q.out =================================================================== --- 
ql/src/test/results/clientpositive/join34.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -69,75 +69,70 @@ predicate: expr: (key < 20) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key < 20) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col1} - 1 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col1} + 1 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + outputColumnNames: _col1, _col2, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col1, _col2, _col3 - Position of Big Table: 0 Select Operator expressions: - expr: _col1 - type: string expr: _col2 type: string expr: _col3 type: string - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-05-57_676_6075966104051319240/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-05-57_676_6075966104051319240/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382757 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + expr: _col1 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-30-27_511_6544642459407208280/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-30-27_511_6544642459407208280/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location 
pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300379427 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false null-subquery2:subq1-subquery2:x1 TableScan alias: x1 @@ -147,75 +142,70 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {_col1} - 1 {key} {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {_col1} + 1 {key} {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + outputColumnNames: _col1, _col2, _col3 + Position of Big Table: 0 + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string outputColumnNames: _col1, _col2, _col3 - Position of Big Table: 0 Select Operator expressions: - expr: _col1 - type: string expr: _col2 type: string expr: _col3 type: string - outputColumnNames: _col1, _col2, _col3 - Select Operator - expressions: - expr: _col2 - type: string - expr: _col3 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-05-57_676_6075966104051319240/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-05-57_676_6075966104051319240/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382757 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + expr: _col1 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-30-27_511_6544642459407208280/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-30-27_511_6544642459407208280/-ext-10000/ + table: + input 
format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300379427 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- ql/src/test/results/clientpositive/join35.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -50,37 +50,32 @@ predicate: expr: (key < 20) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key < 20) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [null-subquery1:subq1-subquery1:x] @@ -472,37 +467,32 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [null-subquery2:subq1-subquery2:x1] Index: ql/src/test/results/clientpositive/join38.q.out 
=================================================================== --- ql/src/test/results/clientpositive/join38.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join38.q.out (working copy) @@ -143,44 +143,40 @@ expr: _col15 type: string outputColumnNames: _col1, _col9, _col15 - Filter Operator - predicate: - expr: (_col15 = 111) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: _col1 + type: string + expr: _col9 + type: string + outputColumnNames: _col1, _col9 + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: _col1 type: string expr: _col9 type: string - outputColumnNames: _col1, _col9 - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string expr: _col1 type: string - expr: _col9 + sort order: ++ + Map-reduce partition columns: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - Map-reduce partition columns: - expr: _col0 - type: string - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col2 - type: bigint + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/join39.q.out =================================================================== --- ql/src/test/results/clientpositive/join39.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join39.q.out (working copy) @@ -40,26 +40,22 @@ predicate: expr: (key <= 100) type: boolean - Filter Operator - predicate: - expr: (key <= 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[_col0]] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/join4.q.out =================================================================== --- ql/src/test/results/clientpositive/join4.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join4.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value 
expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join40.q.out =================================================================== --- ql/src/test/results/clientpositive/join40.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join40.q.out (working copy) @@ -39,31 +39,27 @@ predicate: expr: (key <= 100) type: boolean - Filter Operator - predicate: - expr: (key <= 100) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -1812,24 +1808,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -2533,24 +2525,20 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string src2 TableScan alias: src2 @@ -2558,24 +2546,20 @@ predicate: expr: (key < 15) type: boolean - Filter Operator - predicate: - expr: (key < 15) - type: boolean - Reduce Output Operator - key 
expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string src3 TableScan alias: src3 @@ -3266,26 +3250,22 @@ predicate: expr: (key <= 100) type: boolean - Filter Operator - predicate: - expr: (key <= 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - HashTable Sink Operator - condition expressions: - 0 {key} {value} - 1 {_col0} {_col1} - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[_col0]] - Position of Big Table: 0 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + HashTable Sink Operator + condition expressions: + 0 {key} {value} + 1 {_col0} {_col1} + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[_col0]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/join5.q.out =================================================================== --- ql/src/test/results/clientpositive/join5.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join5.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join6.q.out =================================================================== --- ql/src/test/results/clientpositive/join6.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join6.q.out (working copy) @@ -52,31 +52,27 
@@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join7.q.out =================================================================== --- ql/src/test/results/clientpositive/join7.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join7.q.out (working copy) @@ -62,31 +62,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -94,31 +90,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - 
expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:c:src3 TableScan alias: src3 @@ -126,31 +118,27 @@ predicate: expr: ((key > 20) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 20) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 2 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/join8.q.out =================================================================== --- ql/src/test/results/clientpositive/join8.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join8.q.out (working copy) @@ -52,31 +52,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -84,31 +80,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: @@ -118,21 +110,21 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Filter Operator - predicate: - expr: (_col2 is null and 
_col0 is not null) - type: boolean + Filter Operator + predicate: + expr: (_col2 is null and _col0 is not null) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: expr: _col0 Index: ql/src/test/results/clientpositive/join9.q.out =================================================================== --- ql/src/test/results/clientpositive/join9.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join9.q.out (working copy) @@ -150,51 +150,46 @@ 1 {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col2, _col3, _col7 - Filter Operator - isSamplingPred: false - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col7 + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: _col0 + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: _col7 - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-08-44_532_2940878602076923711/-ext-10000 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-02-10_16-08-44_532_2940878602076923711/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1297382924 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-37-30_985_6005635493455371640/-ext-10000 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-17_09-37-30_985_6005635493455371640/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/Users/amarsri/Documents/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1300379850 + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy) @@ -106,46 +106,41 @@ expr: _col11 type: string outputColumnNames: _col0, _col5, _col9, _col10, _col11 - Filter Operator - isSamplingPred: false - predicate: - expr: ((_col10 = '2008-04-08') and (_col11 = 11)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_19-43-21_478_8339235676581259155/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_19-43-21_478_8339235676581259155/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1306982601 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: _col9 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-22-41_288_1522348091919369375/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-22-41_288_1522348091919369375/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310376161 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -598,51 +593,46 @@ expr: _col11 type: string outputColumnNames: _col0, _col5, _col9, _col10, _col11 - Filter 
Operator - isSamplingPred: false - predicate: - expr: ((_col10 = '2008-04-08') and (_col11 = 11)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col9 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_19-43-39_193_3629767743822800153/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_19-43-39_193_3629767743822800153/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,val2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dest_j1 - name default.dest_j1 - numFiles 1 - numPartitions 0 - numRows 107 - rawDataSize 2018 - serialization.ddl struct dest_j1 { string key, string value, string val2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2125 - transient_lastDdlTime 1306982607 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest_j1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: _col9 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-22-56_227_1785331413587992732/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-22-56_227_1785331413587992732/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,val2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest_j1 + name default.dest_j1 + numFiles 1 + numPartitions 0 + numRows 107 + rawDataSize 2018 + serialization.ddl struct dest_j1 { string key, string value, string val2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 2125 + transient_lastDdlTime 1310376166 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest_j1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/lateral_view_ppd.q.out =================================================================== --- ql/src/test/results/clientpositive/lateral_view_ppd.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/lateral_view_ppd.q.out (working copy) @@ -34,23 +34,19 @@ expr: _col2 type: int outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - 
expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -69,23 +65,19 @@ expr: _col2 type: int outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -136,19 +128,19 @@ SELECT * : (no compute) Lateral View Join Operator outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: - expr: ((_col0 = '0') and (_col2 = 1)) - type: boolean + Filter Operator + predicate: + expr: ((_col0 = '0') and (_col2 = 1)) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1, _col2 Select Operator expressions: expr: _col1 @@ -171,19 +163,19 @@ function name: explode Lateral View Join Operator outputColumnNames: _col0, _col1, _col2 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1, _col2 - Filter Operator - predicate: - expr: ((_col0 = '0') and (_col2 = 1)) - type: boolean + Filter Operator + predicate: + expr: ((_col0 = '0') and (_col2 = 1)) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1, _col2 Select Operator expressions: expr: _col1 @@ -248,24 +240,20 @@ expr: _col3 type: string outputColumnNames: _col1, _col4, _col2, _col3 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col4 - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col4 + type: int + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -286,24 +274,20 @@ expr: _col3 type: string outputColumnNames: _col1, _col4, _col2, _col3 - Filter Operator - predicate: - expr: ((_col2 = '2008-04-08') and (_col3 = '12')) - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col4 - type: int - outputColumnNames: _col0, _col1 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col4 + type: int + outputColumnNames: _col0, _col1 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -371,23 +355,19 @@ expr: _col0 type: string outputColumnNames: _col1, _col2, _col0 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -406,23 +386,19 @@ expr: _col0 type: string outputColumnNames: _col1, _col2, _col0 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: array(1,2,3) @@ -446,23 +422,19 @@ expr: _col0 type: string outputColumnNames: _col1, _col2, _col0 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Select Operator expressions: expr: 
array(1,2,3) @@ -481,23 +453,19 @@ expr: _col0 type: string outputColumnNames: _col1, _col2, _col0 - Filter Operator - predicate: - expr: (_col0 = '0') - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: int - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: int + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/load_dyn_part10.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part10.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/load_dyn_part10.q.out (working copy) @@ -44,27 +44,23 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds > '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part10 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part10 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/load_dyn_part13.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part13.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/load_dyn_part13.q.out (working copy) @@ -62,37 +62,33 @@ predicate: expr: (key < 20) type: boolean - Filter Operator - predicate: - expr: (key < 20) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: '22' - type: string - outputColumnNames: _col0, _col1, _col2 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part13 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: '22' + type: string + outputColumnNames: _col0, _col1, _col2 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1, 
_col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part13 null-subquery2:s-subquery2:src TableScan alias: src @@ -100,37 +96,33 @@ predicate: expr: ((key > 20) and (key < 40)) type: boolean - Filter Operator - predicate: - expr: ((key > 20) and (key < 40)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: '33' - type: string - outputColumnNames: _col0, _col1, _col2 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part13 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: '33' + type: string + outputColumnNames: _col0, _col1, _col2 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part13 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/load_dyn_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/load_dyn_part2.q.out (working copy) @@ -38,32 +38,28 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds is not null and hr is not null) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + sort order: + Map-reduce partition columns: + expr: _col0 type: string - expr: value + tag: -1 + value expressions: + expr: _col0 type: string - expr: hr + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - sort order: - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Reduce Operator Tree: Extract File Output Operator Index: ql/src/test/results/clientpositive/load_dyn_part3.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/load_dyn_part3.q.out (working copy) @@ -42,29 +42,25 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds is not null and hr is not null) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: 
_col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part3 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part3 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/load_dyn_part4.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part4.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/load_dyn_part4.q.out (working copy) @@ -54,29 +54,25 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds is not null and hr is not null) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part4 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part4 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/load_dyn_part9.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part9.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/load_dyn_part9.q.out (working copy) @@ -44,29 +44,25 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds <= '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part9 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part9 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy) @@ -54,25 +54,20 @@ TableScan alias: b GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [a] @@ -209,7 +204,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) + expr: ((_col4 > 15) and (_col4 < 25)) type: boolean Select Operator expressions: @@ -575,7 +570,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col6 > 15)) and (_col6 < 25)) + expr: ((_col6 > 15) and (_col6 < 25)) type: boolean Select Operator expressions: @@ -942,7 +937,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) and (_col6 = '2008-04-08')) + expr: (((_col4 > 15) and (_col4 < 25)) and (_col6 = '2008-04-08')) type: boolean Select Operator expressions: @@ -1225,7 +1220,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col6 > 15)) and (_col6 < 25)) and (_col2 = '2008-04-08')) + expr: ((_col6 > 15) and (_col6 < 25)) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/mapjoin_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin_distinct.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/mapjoin_distinct.q.out (working copy) @@ -31,19 +31,15 @@ d TableScan alias: d - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -51,28 +47,24 @@ c TableScan alias: c - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col1 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {value} + 1 + 
handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col1 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -217,19 +209,15 @@ d TableScan alias: d - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -237,28 +225,24 @@ c TableScan alias: c - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col1 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col1 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -376,19 +360,15 @@ d TableScan alias: d - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -396,28 +376,24 @@ c TableScan alias: c - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col1 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col1 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work @@ -555,19 +531,15 @@ d TableScan alias: d - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - HashTable Sink Operator - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 
[Column[key]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -575,28 +547,24 @@ c TableScan alias: c - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Map Join Operator - condition map: - Inner Join 0 to 1 - condition expressions: - 0 {value} - 1 - handleSkewJoin: false - keys: - 0 [Column[key]] - 1 [Column[key]] - outputColumnNames: _col1 - Position of Big Table: 0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Map Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {value} + 1 + handleSkewJoin: false + keys: + 0 [Column[key]] + 1 [Column[key]] + outputColumnNames: _col1 + Position of Big Table: 0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Local Work: Map Reduce Local Work Index: ql/src/test/results/clientpositive/mapjoin_subquery.q.out =================================================================== --- ql/src/test/results/clientpositive/mapjoin_subquery.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/mapjoin_subquery.q.out (working copy) @@ -46,23 +46,15 @@ z TableScan alias: z - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - predicate: - expr: (hr = 11) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -310,23 +302,15 @@ z TableScan alias: z - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Filter Operator - predicate: - expr: (hr = 11) - type: boolean - HashTable Sink Operator - condition expressions: - 0 {_col0} - 1 {value} - handleSkewJoin: false - keys: - 0 [Column[_col0]] - 1 [Column[key]] - Position of Big Table: 0 + HashTable Sink Operator + condition expressions: + 0 {_col0} + 1 {value} + handleSkewJoin: false + keys: + 0 [Column[_col0]] + 1 [Column[key]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce Index: ql/src/test/results/clientpositive/merge3.q.out =================================================================== --- ql/src/test/results/clientpositive/merge3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/merge3.q.out (working copy) @@ -2268,47 +2268,42 @@ TableScan alias: merge_src_part GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_20-01-51_051_6120737160044680268/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: 
pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_20-01-51_051_6120737160044680268/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/merge_src_part2 - name default.merge_src_part2 - partition_columns ds - serialization.ddl struct merge_src_part2 { string key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1306983711 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_src_part2 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-35-46_067_3066864399805575043/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-35-46_067_3066864399805575043/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/merge_src_part2 + name default.merge_src_part2 + partition_columns ds + serialization.ddl struct merge_src_part2 { string key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310376946 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_src_part2 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/merge_src_part/ds=2008-04-08 [merge_src_part] @@ -4651,33 +4646,28 @@ TableScan alias: merge_src_part GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + sort order: + Map-reduce partition columns: + expr: _col2 type: string - expr: value + tag: -1 + value expressions: + expr: _col0 type: string - expr: ds + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - sort order: - Map-reduce partition columns: - expr: _col2 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/merge_src_part/ds=2008-04-08 [s:merge_src_part] Index: 
ql/src/test/results/clientpositive/merge4.q.out =================================================================== --- ql/src/test/results/clientpositive/merge4.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/merge4.q.out (working copy) @@ -27,27 +27,23 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part Stage: Stage-5 Conditional Operator @@ -1145,25 +1141,21 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.nzhang_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.nzhang_part Stage: Stage-5 Conditional Operator @@ -2776,25 +2768,21 @@ null-subquery1:s-subquery1:srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/merge_dynamic_partition.q.out =================================================================== --- ql/src/test/results/clientpositive/merge_dynamic_partition.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/merge_dynamic_partition.q.out (working copy) @@ -54,27 
+54,23 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-0 Move Operator @@ -660,25 +656,21 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-5 Conditional Operator @@ -1294,29 +1286,25 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: ((ds = '2008-04-08') and (hr = 11)) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out =================================================================== --- ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out (revision 1145463) +++ 
ql/src/test/results/clientpositive/merge_dynamic_partition2.q.out (working copy) @@ -70,27 +70,23 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out =================================================================== --- ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/merge_dynamic_partition3.q.out (working copy) @@ -116,29 +116,25 @@ srcpart_merge_dp TableScan alias: srcpart_merge_dp - Filter Operator - predicate: - expr: (ds >= '2008-04-08') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.merge_dynamic_part + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.merge_dynamic_part Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/no_hooks.q.out =================================================================== --- ql/src/test/results/clientpositive/no_hooks.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/no_hooks.q.out (working copy) @@ -52,27 +52,23 @@ 1 {VALUE._col0} {VALUE._col1} handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: ((_col0 < 10) and (_col4 < 10)) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col4 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + 
expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-2 Map Reduce Index: ql/src/test/results/clientpositive/noalias_subq1.q.out =================================================================== --- ql/src/test/results/clientpositive/noalias_subq1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/noalias_subq1.q.out (working copy) @@ -29,21 +29,17 @@ expr: key type: string outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col1 < 100) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/notable_alias1.q.out =================================================================== --- ql/src/test/results/clientpositive/notable_alias1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/notable_alias1.q.out (working copy) @@ -30,36 +30,32 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/notable_alias2.q.out =================================================================== --- ql/src/test/results/clientpositive/notable_alias2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/notable_alias2.q.out (working copy) @@ -30,36 +30,32 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + 
outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/nullgroup.q.out (working copy) @@ -22,23 +22,19 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -96,23 +92,19 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: _col0 - type: bigint + Select Operator + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + mode: hash + outputColumnNames: _col0 + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: _col0 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -171,20 +163,16 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - Reduce Output Operator - sort order: - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 - value expressions: - expr: 1 - type: int + Select Operator + Reduce Output Operator + sort order: + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: @@ -266,17 +254,13 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - Reduce Output Operator - sort order: - tag: -1 - value expressions: - expr: 1 - type: int + Select Operator + Reduce Output Operator + sort order: + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup2.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/nullgroup2.q.out (working copy) @@ -23,36 +23,32 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string 
+ outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -147,36 +143,32 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: expr: key type: string - outputColumnNames: key - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -239,27 +231,23 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Reduce Output Operator + key expressions: expr: key type: string - outputColumnNames: key - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: rand() - type: double - tag: -1 - value expressions: - expr: 1 - type: int + sort order: + + Map-reduce partition columns: + expr: rand() + type: double + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: @@ -354,27 +342,23 @@ predicate: expr: (key > 9999) type: boolean - Filter Operator - predicate: - expr: (key > 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: string + outputColumnNames: key + Reduce Output Operator + key expressions: expr: key type: string - outputColumnNames: key - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: -1 - value expressions: - expr: 1 - type: int + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup4.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup4.q.out (revision 1145463) +++ 
ql/src/test/results/clientpositive/nullgroup4.q.out (working copy) @@ -23,39 +23,35 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Group By Operator + aggregations: + expr: count(1) + expr: count(DISTINCT value) + bucketGroup: false + keys: expr: value type: string - outputColumnNames: value - Group By Operator - aggregations: - expr: count(1) - expr: count(DISTINCT value) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint - expr: _col2 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -143,36 +139,32 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Group By Operator + aggregations: + expr: count(1) + expr: count(DISTINCT value) + bucketGroup: false + keys: expr: value type: string - outputColumnNames: value - Group By Operator - aggregations: - expr: count(1) - expr: count(DISTINCT value) - bucketGroup: false - keys: - expr: value + mode: hash + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col1 - type: bigint - expr: _col2 - type: bigint + sort order: + + tag: -1 + value expressions: + expr: _col1 + type: bigint + expr: _col2 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -234,27 +226,23 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Reduce Output Operator + key expressions: expr: value type: string - outputColumnNames: value - Reduce Output Operator - key expressions: - expr: value - type: string - sort order: + - Map-reduce partition columns: - expr: value - type: string - tag: -1 - value expressions: - expr: 1 - type: int + sort order: + + Map-reduce partition columns: + expr: value + type: string + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: @@ -342,24 +330,20 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Reduce Output Operator + key expressions: expr: value type: string - outputColumnNames: value - Reduce Output Operator - key expressions: - expr: value - type: string - sort order: + - tag: -1 - value expressions: - expr: 1 - type: int + sort 
order: + + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/nullgroup4_multi_distinct.q.out (working copy) @@ -22,43 +22,39 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Group By Operator + aggregations: + expr: count(1) + expr: count(DISTINCT value) + expr: count(DISTINCT substr(value, 5)) + bucketGroup: false + keys: expr: value type: string - outputColumnNames: value - Group By Operator - aggregations: - expr: count(1) - expr: count(DISTINCT value) - expr: count(DISTINCT substr(value, 5)) - bucketGroup: false - keys: - expr: value + expr: substr(value, 5) + type: string + mode: hash + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: substr(value, 5) + expr: _col1 type: string - mode: hash - outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col2 - type: bigint - expr: _col3 - type: bigint - expr: _col4 - type: bigint + sort order: ++ + tag: -1 + value expressions: + expr: _col2 + type: bigint + expr: _col3 + type: bigint + expr: _col4 + type: bigint Reduce Operator Tree: Group By Operator aggregations: @@ -122,26 +118,22 @@ predicate: expr: (key = 9999) type: boolean - Filter Operator - predicate: - expr: (key = 9999) - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: value + type: string + outputColumnNames: value + Reduce Output Operator + key expressions: expr: value type: string - outputColumnNames: value - Reduce Output Operator - key expressions: - expr: value - type: string - expr: substr(value, 5) - type: string - sort order: ++ - tag: -1 - value expressions: - expr: 1 - type: int + expr: substr(value, 5) + type: string + sort order: ++ + tag: -1 + value expressions: + expr: 1 + type: int Reduce Operator Tree: Group By Operator aggregations: Index: ql/src/test/results/clientpositive/nullgroup5.q.out =================================================================== --- ql/src/test/results/clientpositive/nullgroup5.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/nullgroup5.q.out (working copy) @@ -56,38 +56,6 @@ predicate: expr: (ds = '2009-04-05') type: boolean - Filter Operator - predicate: - expr: (ds = '2009-04-05') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - null-subquery2:u-subquery2:y - TableScan - alias: y - Filter Operator - predicate: - expr: (ds = '2009-04-09') - type: boolean Select Operator expressions: expr: key @@ -109,6 +77,30 @@ 
table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + null-subquery2:u-subquery2:y + TableScan + alias: y + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy) @@ -288,7 +288,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) + expr: (((_col4 > 15) and (_col4 < 25)) and ((_col0 > 10) and (_col0 < 20))) type: boolean Select Operator expressions: @@ -650,7 +650,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) and (_col6 = '2008-04-08')) + expr: ((((_col4 > 15) and (_col4 < 25)) and (_col6 = '2008-04-08')) and ((_col0 > 10) and (_col0 < 20))) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/pcr.q.out =================================================================== --- ql/src/test/results/clientpositive/pcr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/pcr.q.out (working copy) @@ -81,35 +81,30 @@ predicate: expr: (key < 5) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds <= '2000-04-09') and (key < 5)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -295,29 +290,24 @@ predicate: expr: ((ds <= '2000-04-09') or (key < 5)) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds <= '2000-04-09') or (key < 5)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - tag: -1 - value expressions: - expr: 
_col0 - type: int - expr: _col1 - type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -591,35 +581,30 @@ predicate: expr: ((key < 5) and (value <> 'val_2')) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds <= '2000-04-09') and (key < 5)) and (value <> 'val_2')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -807,35 +792,30 @@ predicate: expr: (((ds < '2000-04-09') and (key < 5)) or ((ds > '2000-04-09') and (value = 'val_5'))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds < '2000-04-09') and (key < 5)) or ((ds > '2000-04-09') and (value = 'val_5'))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1025,35 +1005,30 @@ predicate: expr: (((ds < '2000-04-10') and (key < 5)) or ((ds > '2000-04-08') and (value = 'val_5'))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds < '2000-04-10') and (key < 5)) or ((ds > '2000-04-08') and (value = 'val_5'))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: 
pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1304,35 +1279,30 @@ predicate: expr: (((ds < '2000-04-10') or (key < 5)) and ((ds > '2000-04-08') or (value = 'val_5'))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds < '2000-04-10') or (key < 5)) and ((ds > '2000-04-08') or (value = 'val_5'))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col2 type: string - expr: ds + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col2 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1591,31 +1561,26 @@ predicate: expr: (key = 14) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds = '2000-04-08') or (ds = '2000-04-09')) and (key = 14)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -1786,31 +1751,26 @@ TableScan alias: pcr_t1 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2000-04-08') or (ds = '2000-04-09')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -2021,31 +1981,26 @@ TableScan alias: pcr_t1 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds >= '2000-04-08') or (ds < '2000-04-10')) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 
type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -2334,37 +2289,32 @@ predicate: expr: (((ds = '2000-04-08') and (key = 1)) or ((ds = '2000-04-09') and (key = 2))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds = '2000-04-08') and (key = 1)) or ((ds = '2000-04-09') and (key = 2))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - expr: ds + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + sort order: +++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -2537,52 +2487,42 @@ TableScan alias: t1 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2000-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 0 - value expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 0 + value expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string t2 TableScan alias: t2 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2000-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 1 - value expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 1 + value expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [t2, t1] @@ -2822,52 +2762,42 @@ TableScan alias: t1 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2000-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 0 - value expressions: - expr: key - type: int - expr: value - type: string - expr: ds - 
type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 0 + value expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string t2 TableScan alias: t2 GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2000-04-09') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: int - sort order: + - Map-reduce partition columns: - expr: key - type: int - tag: 1 - value expressions: - expr: key - type: int - expr: value - type: string - expr: ds - type: string + Reduce Output Operator + key expressions: + expr: key + type: int + sort order: + + Map-reduce partition columns: + expr: key + type: int + tag: 1 + value expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [t1] @@ -3182,37 +3112,32 @@ predicate: expr: (((ds > '2000-04-08') and (ds < '2000-04-11')) or (key = 2)) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds > '2000-04-08') and (ds < '2000-04-11')) or (((ds >= '2000-04-08') and (ds <= '2000-04-11')) and (key = 2))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - expr: ds + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + sort order: +++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -3540,37 +3465,32 @@ predicate: expr: ((ds > '2000-04-08') or ((ds <= '2000-04-09') and (key = 2))) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds > '2000-04-08') and (ds < '2000-04-11')) or ((ds <= '2000-04-09') and (key = 2))) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + expr: ds + type: string + outputColumnNames: _col0, _col1, _col2 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - expr: ds + expr: _col2 type: string - outputColumnNames: _col0, _col1, _col2 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col2 - type: string + sort order: +++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/pcr_t1/ds=2000-04-08 [pcr_t1] @@ -4753,29 +4673,24 @@ TableScan alias: srcpart GatherStats: false - Filter Operator - isSamplingPred: 
false - predicate: - expr: ((ds = '2008-04-08') and (hr = 11)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] @@ -4908,41 +4823,36 @@ predicate: expr: (key = 11) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (((ds = '2008-04-08') and ((hr = '11') or (hr = '12'))) and (key = 11)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col2 type: string - expr: ds + expr: _col3 type: string - expr: hr + sort order: +++ + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] @@ -5122,41 +5032,36 @@ predicate: expr: (key = 11) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((hr = '11') and (key = 11)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + expr: _col2 type: string - expr: ds + expr: _col3 type: string - expr: hr + sort order: +++ + tag: -1 + value expressions: + expr: _col0 type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: +++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [srcpart] Index: ql/src/test/results/clientpositive/ppd1.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd1.q.out (working copy) @@ -46,11 +46,11 @@ PREHOOK: query: 
SELECT src.key as c3 from src where src.key > '2' PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-18_823_4035711180151393629/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_01-46-31_378_5424397768218246092/-mr-10000 POSTHOOK: query: SELECT src.key as c3 from src where src.key > '2' POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-18_823_4035711180151393629/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_01-46-31_378_5424397768218246092/-mr-10000 238 86 311 @@ -432,3 +432,433 @@ 400 200 97 +PREHOOK: query: EXPLAIN +SELECT src.key as c3 from src where src.key > '2' +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT src.key as c3 from src where src.key > '2' +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '2')))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src + TableScan + alias: src + Filter Operator + predicate: + expr: (key > '2') + type: boolean + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT src.key as c3 from src where src.key > '2' +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_01-46-40_411_5342026921630284187/-mr-10000 +POSTHOOK: query: SELECT src.key as c3 from src where src.key > '2' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_01-46-40_411_5342026921630284187/-mr-10000 +238 +86 +311 +27 +409 +255 +278 +98 +484 +265 +401 +273 +224 +369 +66 +213 +406 +429 +374 +469 +495 +37 +327 +281 +277 +209 +82 +403 +417 +430 +252 +292 +219 +287 +338 +446 +459 +394 +237 +482 +413 +494 +207 +466 +208 +399 +396 +247 +417 +489 +377 +397 +309 +365 +266 +439 +342 +367 +325 +475 +203 +339 +455 +311 +316 +57 +302 +205 +438 +345 +20 +489 +378 +221 +92 +47 +72 +4 +280 +35 +427 +277 +208 +356 +399 +382 +498 +386 +437 +469 +286 +54 +459 +51 +239 +213 +216 +430 +278 +289 +221 +65 +318 +332 +311 +275 +241 +83 +333 +284 +230 +67 +260 +404 +384 +489 +353 +373 +272 +217 +84 +348 +466 +58 +8 +411 +230 +208 +348 +24 +463 +431 +42 +496 +322 +468 +393 +454 +298 +418 +96 +26 +327 +230 +205 +51 +404 +43 +436 +469 +468 +308 +95 +288 +481 +457 +98 +282 +318 +318 +409 +470 +369 +316 +413 +85 +77 +490 +87 +364 +395 +282 +238 +419 +72 +90 +307 +435 +277 +273 +306 +224 +309 +389 +327 +242 +369 +392 +272 +331 +401 +242 +452 +226 +5 +497 +402 +396 +317 +395 +58 +35 +336 +95 +34 +229 +233 +472 +322 +498 +42 +321 +430 +489 +458 +78 +76 +41 +223 +492 +449 +218 +228 +453 +30 +209 +64 +468 +76 +74 +342 +69 +230 +33 +368 +296 +216 +367 +344 +274 +219 +239 +485 +223 +256 +263 +70 +487 +480 +401 +288 +5 +244 +438 +467 +432 +202 +316 +229 +469 +463 +280 +35 +283 +331 
+235 +80 +44 +321 +335 +466 +366 +403 +483 +53 +257 +406 +409 +406 +401 +258 +90 +203 +262 +348 +424 +396 +201 +217 +431 +454 +478 +298 +431 +424 +382 +5 +70 +397 +480 +291 +24 +351 +255 +70 +438 +414 +200 +491 +237 +439 +360 +248 +479 +305 +417 +444 +429 +443 +323 +325 +277 +230 +478 +468 +310 +317 +333 +493 +460 +207 +249 +265 +480 +83 +353 +214 +462 +233 +406 +454 +375 +401 +421 +407 +384 +256 +26 +67 +384 +379 +462 +492 +298 +9 +341 +498 +458 +362 +285 +348 +273 +281 +344 +97 +469 +315 +84 +28 +37 +448 +348 +307 +414 +477 +222 +90 +403 +400 +200 +97 Index: ql/src/test/results/clientpositive/ppd_clusterby.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_clusterby.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_clusterby.q.out (working copy) @@ -194,5 +194,195 @@ POSTHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-25_950_4577225496126879083/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-06-51_471_1696629323983265212/-mr-10000 20 val_20 20 +PREHOOK: query: EXPLAIN +SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME SRC) x)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 10)) (TOK_CLUSTERBY (. (TOK_TABLE_OR_COL x) key)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + x + TableScan + alias: x + Filter Operator + predicate: + expr: (key = 10) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + Reduce Operator Tree: + Extract + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-07_755_9086280220307924310/-mr-10000 +POSTHOOK: query: SELECT * FROM SRC x where x.key = 10 CLUSTER BY x.key +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-07_755_9086280220307924310/-mr-10000 +10 val_10 +PREHOOK: query: EXPLAIN +SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM 
(TOK_JOIN (TOK_TABREF (TOK_TABNAME SRC) x) (TOK_TABREF (TOK_TABNAME SRC) y) (= (. (TOK_TABLE_OR_COL x) key) (. (TOK_TABLE_OR_COL y) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL x) value) v1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL y) key))) (TOK_WHERE (= (. (TOK_TABLE_OR_COL x) key) 20)) (TOK_CLUSTERBY (TOK_TABLE_OR_COL v1)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + x + TableScan + alias: x + Filter Operator + predicate: + expr: (key = 20) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + y + TableScan + alias: y + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col4 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-16_261_4934200005006221322/-mr-10002 + Reduce Output Operator + key expressions: + expr: _col1 + type: string + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + Reduce Operator Tree: + Extract + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-16_525_2344085515974635436/-mr-10000 +POSTHOOK: query: SELECT x.key, x.value as v1, y.key FROM SRC x JOIN SRC y ON (x.key = y.key) where x.key = 20 CLUSTER BY v1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-07-16_525_2344085515974635436/-mr-10000 +20 val_20 20 Index: ql/src/test/results/clientpositive/ppd_constant_expr.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_constant_expr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_constant_expr.q.out (working copy) @@ -112,7 +112,7 @@ POSTHOOK: query: SELECT ppd_constant_expr.* FROM ppd_constant_expr POSTHOOK: type: QUERY 
POSTHOOK: Input: default@ppd_constant_expr -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-37_539_4877038009687877928/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_03-08-49_562_6227549501988296305/-mr-10000 POSTHOOK: Lineage: ppd_constant_expr.c1 EXPRESSION [] POSTHOOK: Lineage: ppd_constant_expr.c2 EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: ppd_constant_expr.c3 EXPRESSION [] @@ -141,3 +141,150 @@ NULL NULL NULL NULL NULL NULL NULL NULL NULL +PREHOOK: query: EXPLAIN +FROM src1 +INSERT OVERWRITE TABLE ppd_constant_expr SELECT 4 + NULL, src1.key - NULL, NULL + NULL +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +FROM src1 +INSERT OVERWRITE TABLE ppd_constant_expr SELECT 4 + NULL, src1.key - NULL, NULL + NULL +POSTHOOK: type: QUERY +POSTHOOK: Lineage: ppd_constant_expr.c1 EXPRESSION [] +POSTHOOK: Lineage: ppd_constant_expr.c2 EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: ppd_constant_expr.c3 EXPRESSION [] +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src1))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME ppd_constant_expr))) (TOK_SELECT (TOK_SELEXPR (+ 4 TOK_NULL)) (TOK_SELEXPR (- (. (TOK_TABLE_OR_COL src1) key) TOK_NULL)) (TOK_SELEXPR (+ TOK_NULL TOK_NULL))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-5 depends on stages: Stage-1 , consists of Stage-4, Stage-3 + Stage-4 + Stage-0 depends on stages: Stage-4, Stage-3 + Stage-2 depends on stages: Stage-0 + Stage-3 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src1 + TableScan + alias: src1 + Select Operator + expressions: + expr: (4 + null) + type: int + expr: (key - null) + type: double + expr: (null + null) + type: tinyint + outputColumnNames: _col0, _col1, _col2 + Select Operator + expressions: + expr: _col0 + type: int + expr: UDFToInteger(_col1) + type: int + expr: UDFToDouble(_col2) + type: double + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.ppd_constant_expr + + Stage: Stage-5 + Conditional Operator + + Stage: Stage-4 + Move Operator + files: + hdfs directory: true + destination: pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-22_03-08-50_015_6932944037979128451/-ext-10000 + + Stage: Stage-0 + Move Operator + tables: + replace: true + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.ppd_constant_expr + + Stage: Stage-2 + Stats-Aggr Operator + + Stage: Stage-3 + Map Reduce + Alias -> Map Operator Tree: + pfile:/Users/amarsri/Documents/workspace/hive/build/ql/scratchdir/hive_2011-03-22_03-08-50_015_6932944037979128451/-ext-10002 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.ppd_constant_expr + + +PREHOOK: query: FROM src1 +INSERT OVERWRITE TABLE ppd_constant_expr SELECT 4 + NULL, src1.key - NULL, 
NULL + NULL +PREHOOK: type: QUERY +PREHOOK: Input: default@src1 +PREHOOK: Output: default@ppd_constant_expr +POSTHOOK: query: FROM src1 +INSERT OVERWRITE TABLE ppd_constant_expr SELECT 4 + NULL, src1.key - NULL, NULL + NULL +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src1 +POSTHOOK: Output: default@ppd_constant_expr +POSTHOOK: Lineage: ppd_constant_expr.c1 EXPRESSION [] +POSTHOOK: Lineage: ppd_constant_expr.c1 EXPRESSION [] +POSTHOOK: Lineage: ppd_constant_expr.c2 EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: ppd_constant_expr.c2 EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: ppd_constant_expr.c3 EXPRESSION [] +POSTHOOK: Lineage: ppd_constant_expr.c3 EXPRESSION [] +PREHOOK: query: SELECT ppd_constant_expr.* FROM ppd_constant_expr +PREHOOK: type: QUERY +PREHOOK: Input: default@ppd_constant_expr +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_03-08-58_708_3073565512234424163/-mr-10000 +POSTHOOK: query: SELECT ppd_constant_expr.* FROM ppd_constant_expr +POSTHOOK: type: QUERY +POSTHOOK: Input: default@ppd_constant_expr +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_03-08-58_708_3073565512234424163/-mr-10000 +POSTHOOK: Lineage: ppd_constant_expr.c1 EXPRESSION [] +POSTHOOK: Lineage: ppd_constant_expr.c1 EXPRESSION [] +POSTHOOK: Lineage: ppd_constant_expr.c2 EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: ppd_constant_expr.c2 EXPRESSION [(src1)src1.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: ppd_constant_expr.c3 EXPRESSION [] +POSTHOOK: Lineage: ppd_constant_expr.c3 EXPRESSION [] +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL +NULL NULL NULL Index: ql/src/test/results/clientpositive/ppd_gby.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_gby.q.out (working copy) @@ -104,14 +104,14 @@ WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400') PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-38_112_8267432549996239027/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-12-18_104_5370641899416935796/-mr-10000 POSTHOOK: query: SELECT src1.c1 FROM (SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400') POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-38_112_8267432549996239027/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-12-18_104_5370641899416935796/-mr-10000 val_201 val_202 val_203 @@ -241,3 +241,242 @@ val_397 val_399 val_4 +PREHOOK: query: EXPLAIN +SELECT src1.c1 +FROM +(SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 +WHERE 
src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400') +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT src1.c1 +FROM +(SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 +WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400') +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c1) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) key)) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) value) 'val_10')) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) value)))) src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1))) (TOK_WHERE (and (> (. (TOK_TABLE_OR_COL src1) c1) 'val_200') (or (> (. (TOK_TABLE_OR_COL src1) c2) 30) (< (. (TOK_TABLE_OR_COL src1) c1) 'val_400')))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src1:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((value > 'val_10') and (value > 'val_200')) + type: boolean + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + outputColumnNames: value, key + Group By Operator + aggregations: + expr: count(key) + bucketGroup: false + keys: + expr: value + type: string + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint + Reduce Operator Tree: + Group By Operator + aggregations: + expr: count(VALUE._col0) + bucketGroup: false + keys: + expr: KEY._col0 + type: string + mode: mergepartial + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: ((_col0 > 'val_200') and ((_col1 > 30) or (_col0 < 'val_400'))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: _col0 + type: string + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT src1.c1 +FROM +(SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 +WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400') +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-12-29_361_5339487985983121248/-mr-10000 +POSTHOOK: query: SELECT src1.c1 +FROM +(SELECT src.value as c1, count(src.key) as c2 from src where src.value > 'val_10' group by src.value) src1 +WHERE src1.c1 > 'val_200' and (src1.c2 > 30 or src1.c1 < 'val_400') +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-12-29_361_5339487985983121248/-mr-10000 +val_201 +val_202 +val_203 +val_205 +val_207 +val_208 +val_209 +val_213 +val_214 +val_216 +val_217 +val_218 +val_219 +val_221 +val_222 +val_223 +val_224 
+val_226 +val_228 +val_229 +val_230 +val_233 +val_235 +val_237 +val_238 +val_239 +val_24 +val_241 +val_242 +val_244 +val_247 +val_248 +val_249 +val_252 +val_255 +val_256 +val_257 +val_258 +val_26 +val_260 +val_262 +val_263 +val_265 +val_266 +val_27 +val_272 +val_273 +val_274 +val_275 +val_277 +val_278 +val_28 +val_280 +val_281 +val_282 +val_283 +val_284 +val_285 +val_286 +val_287 +val_288 +val_289 +val_291 +val_292 +val_296 +val_298 +val_30 +val_302 +val_305 +val_306 +val_307 +val_308 +val_309 +val_310 +val_311 +val_315 +val_316 +val_317 +val_318 +val_321 +val_322 +val_323 +val_325 +val_327 +val_33 +val_331 +val_332 +val_333 +val_335 +val_336 +val_338 +val_339 +val_34 +val_341 +val_342 +val_344 +val_345 +val_348 +val_35 +val_351 +val_353 +val_356 +val_360 +val_362 +val_364 +val_365 +val_366 +val_367 +val_368 +val_369 +val_37 +val_373 +val_374 +val_375 +val_377 +val_378 +val_379 +val_382 +val_384 +val_386 +val_389 +val_392 +val_393 +val_394 +val_395 +val_396 +val_397 +val_399 +val_4 Index: ql/src/test/results/clientpositive/ppd_gby2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_gby2.q.out (working copy) @@ -110,7 +110,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/sdong/hive_2011-02-10_16-56-41_891_721328983378577073/-mr-10002 + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-17-03_240_3343733212968194961/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -159,7 +159,7 @@ GROUP BY src1.c2 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-41_988_2096999785618851738/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-17-04_077_883376489676525041/-mr-10000 POSTHOOK: query: SELECT max(src1.c1), src1.c2 FROM (SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1 @@ -167,9 +167,180 @@ GROUP BY src1.c2 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-41_988_2096999785618851738/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-17-04_077_883376489676525041/-mr-10000 val_4 1 val_399 2 val_396 3 val_277 4 val_348 5 +PREHOOK: query: EXPLAIN +SELECT max(src1.c1), src1.c2 +FROM +(SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1 +WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400') +GROUP BY src1.c2 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT max(src1.c1), src1.c2 +FROM +(SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1 +WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400') +GROUP BY src1.c2 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c1) (TOK_SELEXPR (TOK_FUNCTION count (. (TOK_TABLE_OR_COL src) key)) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) value) 'val_10')) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src) value)))) src1)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_FUNCTION max (. (TOK_TABLE_OR_COL src1) c1))) (TOK_SELEXPR (. 
(TOK_TABLE_OR_COL src1) c2))) (TOK_WHERE (AND (> (. (TOK_TABLE_OR_COL src1) c1) 'val_200') (OR (> (. (TOK_TABLE_OR_COL src1) c2) 30) (< (. (TOK_TABLE_OR_COL src1) c1) 'val_400')))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL src1) c2)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src1:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((value > 'val_10') and (value > 'val_200')) + type: boolean + Select Operator + expressions: + expr: value + type: string + expr: key + type: string + outputColumnNames: value, key + Group By Operator + aggregations: + expr: count(key) + bucketGroup: false + keys: + expr: value + type: string + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint + Reduce Operator Tree: + Group By Operator + aggregations: + expr: count(VALUE._col0) + bucketGroup: false + keys: + expr: KEY._col0 + type: string + mode: mergepartial + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: ((_col0 > 'val_200') and ((_col1 > 30) or (_col0 < 'val_400'))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 + Group By Operator + aggregations: + expr: max(_col0) + bucketGroup: false + keys: + expr: _col1 + type: bigint + mode: hash + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-17-23_094_9196364983578023762/-mr-10002 + Reduce Output Operator + key expressions: + expr: _col0 + type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: bigint + tag: -1 + value expressions: + expr: _col1 + type: string + Reduce Operator Tree: + Group By Operator + aggregations: + expr: max(VALUE._col0) + bucketGroup: false + keys: + expr: KEY._col0 + type: bigint + mode: mergepartial + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: _col1 + type: string + expr: _col0 + type: bigint + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT max(src1.c1), src1.c2 +FROM +(SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY src.value) src1 +WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400') +GROUP BY src1.c2 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-17-23_367_2772975119752922926/-mr-10000 +POSTHOOK: query: SELECT max(src1.c1), src1.c2 +FROM +(SELECT src.value AS c1, count(src.key) AS c2 FROM src WHERE src.value > 'val_10' GROUP BY 
src.value) src1 +WHERE src1.c1 > 'val_200' AND (src1.c2 > 30 OR src1.c1 < 'val_400') +GROUP BY src1.c2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-17-23_367_2772975119752922926/-mr-10000 +val_4 1 +val_399 2 +val_396 3 +val_277 4 +val_348 5 Index: ql/src/test/results/clientpositive/ppd_gby_join.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_gby_join.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_gby_join.q.out (working copy) @@ -131,7 +131,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/sdong/hive_2011-02-10_16-56-49_887_123690558182632791/-mr-10002 + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-14-53_815_8679319347089159838/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -173,3 +173,166 @@ limit: -1 +PREHOOK: query: EXPLAIN +SELECT src1.c1, count(1) +FROM +(SELECT src.key AS c1, src.value AS c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key AS c3, src.value AS c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' AND (src1.c2 < 'val_50' OR src1.c1 > '2') AND (src2.c3 > '50' OR src1.c1 < '50') AND (src2.c3 <> '4') +GROUP BY src1.c1 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT src1.c1, count(1) +FROM +(SELECT src.key AS c1, src.value AS c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key AS c3, src.value AS c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' AND (src1.c2 < 'val_50' OR src1.c1 > '2') AND (src2.c3 > '50' OR src1.c1 < '50') AND (src2.c3 <> '4') +GROUP BY src1.c1 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '1')))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '2')))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) '400')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (TOK_FUNCTION count 1))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL src1) c1) '20') (OR (< (. (TOK_TABLE_OR_COL src1) c2) 'val_50') (> (. (TOK_TABLE_OR_COL src1) c1) '2'))) (OR (> (. (TOK_TABLE_OR_COL src2) c3) '50') (< (. (TOK_TABLE_OR_COL src1) c1) '50'))) (<> (. (TOK_TABLE_OR_COL src2) c3) '4'))) (TOK_GROUPBY (. 
(TOK_TABLE_OR_COL src1) c1)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src1:src + TableScan + alias: src + Filter Operator + predicate: + expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + src2:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((key > '2') and (key <> '4')) + type: boolean + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col2 + Filter Operator + predicate: + expr: ((((_col0 > '20') and ((_col1 < 'val_50') or (_col0 > '2'))) and ((_col2 > '50') or (_col0 < '50'))) and (_col2 <> '4')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + outputColumnNames: _col0 + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: + expr: _col0 + type: string + mode: hash + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-14-54_718_7102065647101283820/-mr-10002 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint + Reduce Operator Tree: + Group By Operator + aggregations: + expr: count(VALUE._col0) + bucketGroup: false + keys: + expr: KEY._col0 + type: string + mode: mergepartial + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + Index: ql/src/test/results/clientpositive/ppd_join.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_join.q.out (working copy) @@ -136,7 +136,7 @@ WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4') PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: 
file:/tmp/sdong/hive_2011-02-10_16-56-50_301_2741043495905569604/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-19-54_404_4275400666711143229/-mr-10000 POSTHOOK: query: SELECT src1.c1, src2.c4 FROM (SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1 @@ -146,7 +146,7 @@ WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4') POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-50_301_2741043495905569604/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-19-54_404_4275400666711143229/-mr-10000 200 val_200 200 val_200 200 val_200 @@ -565,3 +565,558 @@ 399 val_399 399 val_399 399 val_399 +PREHOOK: query: EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4') +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4') +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '1')))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '2')))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) '400')))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4))) (TOK_WHERE (and (and (and (> (. (TOK_TABLE_OR_COL src1) c1) '20') (or (< (. (TOK_TABLE_OR_COL src1) c2) 'val_50') (> (. (TOK_TABLE_OR_COL src1) c1) '2'))) (or (> (. (TOK_TABLE_OR_COL src2) c3) '50') (< (. (TOK_TABLE_OR_COL src1) c1) '50'))) (<> (. 
(TOK_TABLE_OR_COL src2) c3) '4'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src1:src + TableScan + alias: src + Filter Operator + predicate: + expr: (((key > '1') and (key < '400')) and ((key > '20') and ((value < 'val_50') or (key > '2')))) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + src2:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((key > '2') and (key <> '4')) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col2, _col3 + Filter Operator + predicate: + expr: ((((_col0 > '20') and ((_col1 < 'val_50') or (_col0 > '2'))) and ((_col2 > '50') or (_col0 < '50'))) and (_col2 <> '4')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4') +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-20-04_832_4076756605206612832/-mr-10000 +POSTHOOK: query: SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key > '1' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +WHERE src1.c1 > '20' and (src1.c2 < 'val_50' or src1.c1 > '2') and (src2.c3 > '50' or src1.c1 < '50') and (src2.c3 <> '4') +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-20-04_832_4076756605206612832/-mr-10000 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +201 val_201 +202 val_202 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +213 val_213 
+213 val_213 +213 val_213 +213 val_213 +214 val_214 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +218 val_218 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +222 val_222 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +226 val_226 +228 val_228 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +235 val_235 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +241 val_241 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +244 val_244 +247 val_247 +248 val_248 +249 val_249 +252 val_252 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +257 val_257 +258 val_258 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +260 val_260 +262 val_262 +263 val_263 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +266 val_266 +27 val_27 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +274 val_274 +275 val_275 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +28 val_28 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +283 val_283 +284 val_284 +285 val_285 +286 val_286 +287 val_287 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +289 val_289 +291 val_291 +292 val_292 +296 val_296 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +30 val_30 +302 val_302 +305 val_305 +306 val_306 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +308 val_308 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +310 val_310 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +315 val_315 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +323 val_323 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +33 val_33 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +332 val_332 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +335 val_335 +336 val_336 +338 val_338 +339 val_339 +34 val_34 +341 val_341 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +344 val_344 +344 val_344 +344 
val_344 +344 val_344 +345 val_345 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +351 val_351 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +356 val_356 +360 val_360 +362 val_362 +364 val_364 +365 val_365 +366 val_366 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +368 val_368 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +373 val_373 +374 val_374 +375 val_375 +377 val_377 +378 val_378 +379 val_379 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +386 val_386 +389 val_389 +392 val_392 +393 val_393 +394 val_394 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +399 val_399 +399 val_399 +399 val_399 +399 val_399 Index: ql/src/test/results/clientpositive/ppd_join2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_join2.q.out (working copy) @@ -214,7 +214,7 @@ WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13) PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-54_335_1550783828530525327/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-22-23_609_8022409171851868425/-mr-10000 POSTHOOK: query: SELECT src1.c1, src2.c4 FROM (SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1 @@ -227,7 +227,7 @@ WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13) POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-56-54_335_1550783828530525327/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-22-23_609_8022409171851868425/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -1746,3 +1746,1735 @@ 399 val_399 399 val_399 4 val_4 +PREHOOK: query: EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '305' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3 +ON src1.c2 = src3.c6 +WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13) +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where 
src.key <> '305' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3 +ON src1.c2 = src3.c6 +WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13) +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '302')))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '305')))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) '400'))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c6)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '306')))) src3) (= (. (TOK_TABLE_OR_COL src1) c2) (. (TOK_TABLE_OR_COL src3) c6)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4))) (TOK_WHERE (and (and (and (and (<> (. (TOK_TABLE_OR_COL src1) c1) '311') (or (<> (. (TOK_TABLE_OR_COL src1) c2) 'val_50') (> (. (TOK_TABLE_OR_COL src1) c1) '1'))) (or (<> (. (TOK_TABLE_OR_COL src2) c3) '10') (<> (. (TOK_TABLE_OR_COL src1) c1) '10'))) (<> (. (TOK_TABLE_OR_COL src2) c3) '14')) (<> (TOK_FUNCTION sqrt (. 
(TOK_TABLE_OR_COL src3) c5)) 13))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src1:src + TableScan + alias: src + Filter Operator + predicate: + expr: (((key <> '302') and (key < '400')) and ((key <> '311') and ((value <> 'val_50') or (key > '1')))) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + src2:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((key <> '305') and (key <> '14')) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + $INTNAME + Reduce Output Operator + key expressions: + expr: _col1 + type: string + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: 0 + value expressions: + expr: _col2 + type: string + expr: _col3 + type: string + expr: _col0 + type: string + expr: _col1 + type: string + src3:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((key <> '306') and (sqrt(key) <> 13)) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 + type: string + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} + 1 {VALUE._col0} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Filter Operator + predicate: + expr: (((((_col2 <> '311') and ((_col3 <> 'val_50') or (_col2 > '1'))) and ((_col0 <> '10') or (_col2 <> '10'))) and (_col0 <> '14')) and (sqrt(_col4) <> 13)) + type: boolean + Select Operator + expressions: + expr: _col2 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1 +JOIN +(SELECT src.key as c3, 
src.value as c4 from src where src.key <> '305' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3 +ON src1.c2 = src3.c6 +WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13) +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-22-44_680_6270114164732441996/-mr-10000 +POSTHOOK: query: SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '302' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '305' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '306' ) src3 +ON src1.c2 = src3.c6 +WHERE src1.c1 <> '311' and (src1.c2 <> 'val_50' or src1.c1 > '1') and (src2.c3 <> '10' or src1.c1 <> '10') and (src2.c3 <> '14') and (sqrt(src3.c5) <> 13) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-22-44_680_6270114164732441996/-mr-10000 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +105 val_105 +11 val_11 +111 val_111 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +114 val_114 +116 val_116 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +126 val_126 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +131 val_131 +133 val_133 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +136 val_136 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 
+138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +143 val_143 +145 val_145 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +150 val_150 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +153 val_153 +155 val_155 +156 val_156 +157 val_157 +158 val_158 +160 val_160 +162 val_162 +163 val_163 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +166 val_166 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +168 val_168 +17 val_17 +170 val_170 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +177 val_177 +178 val_178 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +180 val_180 +181 val_181 +183 val_183 +186 val_186 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +189 val_189 +19 val_19 +190 val_190 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +192 val_192 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +194 val_194 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +196 val_196 
+197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +2 val_2 +20 val_20 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +201 val_201 +202 val_202 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +214 val_214 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +218 val_218 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +222 val_222 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +226 val_226 +228 val_228 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 
val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +235 val_235 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +241 val_241 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +244 val_244 +247 val_247 +248 val_248 +249 val_249 +252 val_252 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +257 val_257 +258 val_258 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +260 val_260 +262 val_262 +263 val_263 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +266 val_266 +27 val_27 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +274 val_274 +275 val_275 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +28 val_28 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +283 val_283 +284 val_284 +285 val_285 +286 val_286 +287 val_287 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +289 val_289 +291 val_291 +292 val_292 +296 val_296 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 
val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +30 val_30 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +308 val_308 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +310 val_310 +315 val_315 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +323 val_323 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +33 val_33 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +332 val_332 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +335 val_335 +336 val_336 +338 val_338 +339 val_339 +34 val_34 +341 val_341 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +345 val_345 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 
+348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +351 val_351 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +356 val_356 +360 val_360 +362 val_362 +364 val_364 +365 val_365 +366 val_366 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +368 val_368 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +373 val_373 +374 val_374 +375 val_375 +377 val_377 +378 val_378 +379 val_379 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +386 val_386 +389 val_389 +392 val_392 +393 val_393 +394 val_394 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +4 val_4 Index: ql/src/test/results/clientpositive/ppd_join3.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_join3.q.out (working copy) @@ -175,7 +175,7 @@ WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1') PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-01_987_2716591092944855571/-mr-10000 +PREHOOK: Output: 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-25-08_238_6256269886850027827/-mr-10000 POSTHOOK: query: SELECT src1.c1, src2.c4 FROM (SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1 @@ -188,7 +188,7 @@ WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1') POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-01_987_2716591092944855571/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-25-08_238_6256269886850027827/-mr-10000 100 val_100 100 val_100 100 val_100 @@ -1764,3 +1764,1753 @@ 399 val_399 399 val_399 399 val_399 +PREHOOK: query: EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '12' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3 +ON src1.c1 = src3.c5 +WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1') +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '12' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3 +ON src1.c1 = src3.c5 +WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1') +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '11')))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '12')))) src2) (AND (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)) (< (. (TOK_TABLE_OR_COL src1) c1) '400'))) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c5) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c6)) (TOK_WHERE (<> (. (TOK_TABLE_OR_COL src) key) '13')))) src3) (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src3) c5)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src2) c4))) (TOK_WHERE (and (and (and (and (> (. (TOK_TABLE_OR_COL src1) c1) '0') (or (<> (. (TOK_TABLE_OR_COL src1) c2) 'val_500') (> (. (TOK_TABLE_OR_COL src1) c1) '1'))) (or (> (. (TOK_TABLE_OR_COL src2) c3) '10') (<> (. (TOK_TABLE_OR_COL src1) c1) '10'))) (<> (. (TOK_TABLE_OR_COL src2) c3) '4')) (<> (. 
(TOK_TABLE_OR_COL src3) c5) '1'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src1:src + TableScan + alias: src + Filter Operator + predicate: + expr: (((key <> '11') and (key < '400')) and ((key > '0') and ((value <> 'val_500') or (key > '1')))) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + src2:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((key <> '12') and (key <> '4')) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + src3:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((key <> '13') and (key <> '1')) + type: boolean + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 + value expressions: + expr: _col0 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + Inner Join 0 to 2 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + 2 {VALUE._col0} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + Filter Operator + predicate: + expr: (((((_col0 > '0') and ((_col1 <> 'val_500') or (_col0 > '1'))) and ((_col2 > '10') or (_col0 <> '10'))) and (_col2 <> '4')) and (_col4 <> '1')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '12' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3 +ON src1.c1 = src3.c5 +WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1') +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-25-20_069_5364583495609185769/-mr-10000 +POSTHOOK: query: SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src where src.key <> '11' ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key <> '12' ) src2 +ON src1.c1 = src2.c3 AND src1.c1 < '400' +JOIN +(SELECT src.key as c5, src.value as c6 from src where src.key <> '13' ) src3 +ON src1.c1 = 
src3.c5 +WHERE src1.c1 > '0' and (src1.c2 <> 'val_500' or src1.c1 > '1') and (src2.c3 > '10' or src1.c1 <> '10') and (src2.c3 <> '4') and (src3.c5 <> '1') +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-25-20_069_5364583495609185769/-mr-10000 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +100 val_100 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +105 val_105 +111 val_111 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +114 val_114 +116 val_116 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +126 val_126 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +131 val_131 +133 val_133 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +136 val_136 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +143 val_143 +145 val_145 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +150 val_150 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +153 val_153 +155 val_155 +156 val_156 +157 val_157 +158 val_158 +160 val_160 +162 
val_162 +163 val_163 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +166 val_166 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +168 val_168 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +17 val_17 +170 val_170 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +177 val_177 +178 val_178 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +180 val_180 +181 val_181 +183 val_183 +186 val_186 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +189 val_189 +19 val_19 +190 val_190 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +192 val_192 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +194 val_194 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +196 val_196 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 
val_199 +199 val_199 +199 val_199 +2 val_2 +20 val_20 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +200 val_200 +201 val_201 +202 val_202 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +203 val_203 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +205 val_205 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +207 val_207 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +208 val_208 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +209 val_209 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +213 val_213 +214 val_214 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +216 val_216 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +217 val_217 +218 val_218 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +219 val_219 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +221 val_221 +222 val_222 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +223 val_223 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +224 val_224 +226 val_226 +228 val_228 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +229 val_229 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 +233 val_233 
+233 val_233 +233 val_233 +235 val_235 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +237 val_237 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +238 val_238 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +239 val_239 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +241 val_241 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +242 val_242 +244 val_244 +247 val_247 +248 val_248 +249 val_249 +252 val_252 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +255 val_255 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +256 val_256 +257 val_257 +258 val_258 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +260 val_260 +262 val_262 +263 val_263 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +265 val_265 +266 val_266 +27 val_27 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +272 val_272 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +273 val_273 +274 val_274 +275 val_275 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +277 val_277 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +278 val_278 +28 val_28 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +280 val_280 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +281 val_281 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +282 val_282 +283 val_283 +284 val_284 +285 val_285 +286 val_286 +287 val_287 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +288 val_288 +289 val_289 +291 val_291 +292 val_292 +296 val_296 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +298 val_298 +30 val_30 +302 val_302 +305 val_305 +306 val_306 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +307 val_307 +307 
val_307 +307 val_307 +308 val_308 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +309 val_309 +310 val_310 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +311 val_311 +315 val_315 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +316 val_316 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +317 val_317 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +318 val_318 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +321 val_321 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +322 val_322 +323 val_323 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +325 val_325 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +327 val_327 +33 val_33 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +331 val_331 +332 val_332 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +333 val_333 +335 val_335 +336 val_336 +338 val_338 +339 val_339 +34 val_34 +341 val_341 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +342 val_342 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +344 val_344 +345 val_345 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 
+348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +351 val_351 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +353 val_353 +356 val_356 +360 val_360 +362 val_362 +364 val_364 +365 val_365 +366 val_366 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +367 val_367 +368 val_368 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +369 val_369 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +373 val_373 +374 val_374 +375 val_375 +377 val_377 +378 val_378 +379 val_379 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +382 val_382 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +384 val_384 +386 val_386 +389 val_389 +392 val_392 +393 val_393 +394 val_394 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +395 val_395 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +396 val_396 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +397 val_397 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 +399 val_399 Index: ql/src/test/results/clientpositive/ppd_multi_insert.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_multi_insert.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_multi_insert.q.out (working copy) @@ -1315,3 +1315,1330 @@ val_498 val_498 val_498 +PREHOOK: query: EXPLAIN +FROM src a JOIN src b ON (a.key = b.key) +INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100 +INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200 +INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 
and a.key < 300 +INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +FROM src a JOIN src b ON (a.key = b.key) +INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100 +INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200 +INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300 +INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300 +POSTHOOK: type: QUERY +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi1))) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME a)))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL a) key) 100))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi2))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 100) (< (. (TOK_TABLE_OR_COL a) key) 200)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME mi3) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr '12')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key))) (TOK_WHERE (and (>= (. (TOK_TABLE_OR_COL a) key) 200) (< (. (TOK_TABLE_OR_COL a) key) 300)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR '../build/ql/test/data/warehouse/mi4.out')) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value))) (TOK_WHERE (>= (. 
(TOK_TABLE_OR_COL a) key) 300)))) + +STAGE DEPENDENCIES: + Stage-4 is a root stage + Stage-0 depends on stages: Stage-4 + Stage-5 depends on stages: Stage-0 + Stage-1 depends on stages: Stage-4 + Stage-6 depends on stages: Stage-1 + Stage-2 depends on stages: Stage-4 + Stage-7 depends on stages: Stage-2 + Stage-3 depends on stages: Stage-4 + +STAGE PLANS: + Stage: Stage-4 + Map Reduce + Alias -> Map Operator Tree: + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 + handleSkewJoin: false + outputColumnNames: _col0, _col1 + Filter Operator + predicate: + expr: (_col0 < 100) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi1 + Filter Operator + predicate: + expr: ((_col0 >= 100) and (_col0 < 200)) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 2 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi2 + Filter Operator + predicate: + expr: ((_col0 >= 200) and (_col0 < 300)) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + outputColumnNames: _col0 + Select Operator + expressions: + expr: UDFToInteger(_col0) + type: int + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 3 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi3 + Filter Operator + predicate: + expr: (_col0 >= 300) + type: boolean + Select Operator + expressions: + expr: _col1 + type: string + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 4 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Move Operator + tables: + replace: true + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi1 + + Stage: Stage-5 + Stats-Aggr Operator 
+ + Stage: Stage-1 + Move Operator + tables: + replace: true + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi2 + + Stage: Stage-6 + Stats-Aggr Operator + + Stage: Stage-2 + Move Operator + tables: + partition: + ds 2008-04-08 + hr 12 + replace: true + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.mi3 + + Stage: Stage-7 + Stats-Aggr Operator + + Stage: Stage-3 + Move Operator + files: + hdfs directory: true + destination: ../build/ql/test/data/warehouse/mi4.out + + +PREHOOK: query: FROM src a JOIN src b ON (a.key = b.key) +INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100 +INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200 +INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300 +INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: ../build/ql/test/data/warehouse/mi4.out +PREHOOK: Output: default@mi1 +PREHOOK: Output: default@mi2 +PREHOOK: Output: default@mi3@ds=2008-04-08/hr=12 +POSTHOOK: query: FROM src a JOIN src b ON (a.key = b.key) +INSERT OVERWRITE TABLE mi1 SELECT a.* WHERE a.key < 100 +INSERT OVERWRITE TABLE mi2 SELECT a.key, a.value WHERE a.key >= 100 and a.key < 200 +INSERT OVERWRITE TABLE mi3 PARTITION(ds='2008-04-08', hr='12') SELECT a.key WHERE a.key >= 200 and a.key < 300 +INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/mi4.out' SELECT a.value WHERE a.key >= 300 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: ../build/ql/test/data/warehouse/mi4.out +POSTHOOK: Output: default@mi1 +POSTHOOK: Output: default@mi2 +POSTHOOK: Output: default@mi3@ds=2008-04-08/hr=12 +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +PREHOOK: query: SELECT mi1.* FROM mi1 +PREHOOK: type: QUERY +PREHOOK: Input: default@mi1 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-28-03_345_2699966828518452446/-mr-10000 +POSTHOOK: query: SELECT mi1.* FROM mi1 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@mi1 +POSTHOOK: Output: 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-28-03_345_2699966828518452446/-mr-10000 +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +0 val_0 +10 val_10 +11 val_11 +12 val_12 +12 val_12 +12 val_12 +12 val_12 +15 val_15 +15 val_15 +15 val_15 +15 val_15 +17 val_17 +18 val_18 +18 val_18 +18 val_18 +18 val_18 +19 val_19 +2 val_2 +20 val_20 +24 val_24 +24 val_24 +24 val_24 +24 val_24 +26 val_26 +26 val_26 +26 val_26 +26 val_26 +27 val_27 +28 val_28 +30 val_30 +33 val_33 +34 val_34 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +35 val_35 +37 val_37 +37 val_37 +37 val_37 +37 val_37 +4 val_4 +41 val_41 +42 val_42 +42 val_42 +42 val_42 +42 val_42 +43 val_43 +44 val_44 +47 val_47 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +5 val_5 +51 val_51 +51 val_51 +51 val_51 +51 val_51 +53 val_53 +54 val_54 +57 val_57 +58 val_58 +58 val_58 +58 val_58 +58 val_58 +64 val_64 +65 val_65 +66 val_66 +67 val_67 +67 val_67 +67 val_67 +67 val_67 +69 val_69 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +72 val_72 +72 val_72 +74 val_74 +76 val_76 +76 val_76 +76 val_76 +76 val_76 +77 val_77 +78 val_78 +8 val_8 +80 val_80 +82 val_82 +83 val_83 +83 val_83 +83 val_83 +83 val_83 +84 val_84 +84 val_84 +84 val_84 +84 val_84 +85 val_85 +86 val_86 +87 val_87 +9 val_9 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +90 val_90 +92 val_92 +95 val_95 +95 val_95 +95 val_95 +95 val_95 +96 val_96 +97 val_97 +97 val_97 +97 val_97 +97 val_97 +98 val_98 +98 val_98 +98 val_98 +98 val_98 +PREHOOK: query: SELECT mi2.* FROM mi2 +PREHOOK: type: QUERY +PREHOOK: Input: default@mi2 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-28-03_677_3645923501369564199/-mr-10000 +POSTHOOK: query: SELECT mi2.* FROM mi2 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@mi2 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-28-03_677_3645923501369564199/-mr-10000 +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +100 val_100 +100 val_100 +100 val_100 +100 val_100 +103 val_103 +103 val_103 +103 val_103 +103 val_103 +104 val_104 +104 val_104 +104 val_104 +104 val_104 +105 val_105 +111 val_111 +113 val_113 +113 val_113 +113 val_113 +113 val_113 +114 val_114 +116 val_116 +118 val_118 +118 val_118 +118 val_118 +118 val_118 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +119 val_119 +120 val_120 +120 val_120 +120 val_120 +120 val_120 +125 val_125 +125 val_125 +125 val_125 +125 val_125 +126 val_126 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +128 val_128 +129 val_129 +129 val_129 +129 val_129 +129 val_129 +131 val_131 +133 val_133 +134 val_134 +134 val_134 +134 val_134 +134 val_134 +136 val_136 +137 val_137 +137 val_137 +137 val_137 +137 val_137 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +138 val_138 +143 val_143 +145 val_145 +146 val_146 +146 val_146 +146 val_146 +146 val_146 +149 val_149 +149 val_149 +149 val_149 +149 val_149 +150 val_150 +152 val_152 +152 val_152 +152 val_152 +152 val_152 +153 val_153 +155 val_155 +156 val_156 +157 val_157 +158 val_158 +160 val_160 +162 val_162 +163 val_163 +164 val_164 +164 val_164 +164 val_164 +164 val_164 +165 val_165 +165 val_165 +165 val_165 +165 val_165 +166 val_166 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +167 val_167 +168 val_168 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +169 val_169 +170 val_170 +172 val_172 +172 val_172 +172 val_172 +172 val_172 +174 val_174 +174 val_174 +174 val_174 +174 val_174 +175 val_175 +175 val_175 +175 val_175 +175 val_175 +176 val_176 +176 val_176 +176 val_176 +176 val_176 +177 val_177 +178 val_178 +179 val_179 +179 val_179 +179 val_179 +179 val_179 +180 val_180 +181 val_181 +183 val_183 +186 val_186 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +187 val_187 +189 val_189 +190 val_190 +191 val_191 +191 val_191 +191 val_191 +191 val_191 +192 val_192 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +193 val_193 +194 val_194 +195 val_195 +195 val_195 +195 val_195 +195 val_195 +196 val_196 +197 val_197 +197 val_197 +197 val_197 +197 val_197 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +199 val_199 +PREHOOK: 
query: SELECT mi3.* FROM mi3 +PREHOOK: type: QUERY +PREHOOK: Input: default@mi3@ds=2008-04-08/hr=12 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-28-04_004_5001774654114388405/-mr-10000 +POSTHOOK: query: SELECT mi3.* FROM mi3 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@mi3@ds=2008-04-08/hr=12 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-28-04_004_5001774654114388405/-mr-10000 +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi1.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi2.value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: mi3 PARTITION(ds=2008-04-08,hr=12).key EXPRESSION [(src)a.FieldSchema(name:key, type:string, comment:default), ] +200 2008-04-08 12 +200 2008-04-08 12 +200 2008-04-08 12 +200 2008-04-08 12 +201 2008-04-08 12 +202 2008-04-08 12 +203 2008-04-08 12 +203 2008-04-08 12 +203 2008-04-08 12 +203 2008-04-08 12 +205 2008-04-08 12 +205 2008-04-08 12 +205 2008-04-08 12 +205 2008-04-08 12 +207 2008-04-08 12 +207 2008-04-08 12 +207 2008-04-08 12 +207 2008-04-08 12 +208 2008-04-08 12 +208 2008-04-08 12 +208 2008-04-08 12 +208 2008-04-08 12 +208 2008-04-08 12 +208 2008-04-08 12 +208 2008-04-08 12 +208 2008-04-08 12 +208 2008-04-08 12 +209 2008-04-08 12 +209 2008-04-08 12 +209 2008-04-08 12 +209 2008-04-08 12 +213 2008-04-08 12 +213 2008-04-08 12 +213 2008-04-08 12 +213 2008-04-08 12 +214 2008-04-08 12 +216 2008-04-08 12 +216 2008-04-08 12 +216 2008-04-08 12 +216 2008-04-08 12 +217 2008-04-08 12 +217 2008-04-08 12 +217 2008-04-08 12 +217 2008-04-08 12 +218 2008-04-08 12 +219 2008-04-08 12 +219 2008-04-08 12 +219 2008-04-08 12 +219 2008-04-08 12 +221 2008-04-08 12 +221 2008-04-08 12 +221 2008-04-08 12 +221 2008-04-08 12 +222 2008-04-08 12 +223 2008-04-08 12 +223 2008-04-08 12 +223 2008-04-08 12 +223 2008-04-08 12 +224 2008-04-08 12 +224 2008-04-08 12 +224 2008-04-08 12 +224 2008-04-08 12 +226 2008-04-08 12 +228 2008-04-08 12 +229 2008-04-08 12 +229 2008-04-08 12 +229 2008-04-08 12 +229 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +230 2008-04-08 12 +233 2008-04-08 12 +233 2008-04-08 12 +233 2008-04-08 12 +233 2008-04-08 12 +235 2008-04-08 12 +237 2008-04-08 12 +237 2008-04-08 12 +237 2008-04-08 12 +237 2008-04-08 12 +238 2008-04-08 12 +238 2008-04-08 12 
+238 2008-04-08 12 +238 2008-04-08 12 +239 2008-04-08 12 +239 2008-04-08 12 +239 2008-04-08 12 +239 2008-04-08 12 +241 2008-04-08 12 +242 2008-04-08 12 +242 2008-04-08 12 +242 2008-04-08 12 +242 2008-04-08 12 +244 2008-04-08 12 +247 2008-04-08 12 +248 2008-04-08 12 +249 2008-04-08 12 +252 2008-04-08 12 +255 2008-04-08 12 +255 2008-04-08 12 +255 2008-04-08 12 +255 2008-04-08 12 +256 2008-04-08 12 +256 2008-04-08 12 +256 2008-04-08 12 +256 2008-04-08 12 +257 2008-04-08 12 +258 2008-04-08 12 +260 2008-04-08 12 +262 2008-04-08 12 +263 2008-04-08 12 +265 2008-04-08 12 +265 2008-04-08 12 +265 2008-04-08 12 +265 2008-04-08 12 +266 2008-04-08 12 +272 2008-04-08 12 +272 2008-04-08 12 +272 2008-04-08 12 +272 2008-04-08 12 +273 2008-04-08 12 +273 2008-04-08 12 +273 2008-04-08 12 +273 2008-04-08 12 +273 2008-04-08 12 +273 2008-04-08 12 +273 2008-04-08 12 +273 2008-04-08 12 +273 2008-04-08 12 +274 2008-04-08 12 +275 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +277 2008-04-08 12 +278 2008-04-08 12 +278 2008-04-08 12 +278 2008-04-08 12 +278 2008-04-08 12 +280 2008-04-08 12 +280 2008-04-08 12 +280 2008-04-08 12 +280 2008-04-08 12 +281 2008-04-08 12 +281 2008-04-08 12 +281 2008-04-08 12 +281 2008-04-08 12 +282 2008-04-08 12 +282 2008-04-08 12 +282 2008-04-08 12 +282 2008-04-08 12 +283 2008-04-08 12 +284 2008-04-08 12 +285 2008-04-08 12 +286 2008-04-08 12 +287 2008-04-08 12 +288 2008-04-08 12 +288 2008-04-08 12 +288 2008-04-08 12 +288 2008-04-08 12 +289 2008-04-08 12 +291 2008-04-08 12 +292 2008-04-08 12 +296 2008-04-08 12 +298 2008-04-08 12 +298 2008-04-08 12 +298 2008-04-08 12 +298 2008-04-08 12 +298 2008-04-08 12 +298 2008-04-08 12 +298 2008-04-08 12 +298 2008-04-08 12 +298 2008-04-08 12 +val_302 +val_305 +val_306 +val_307 +val_307 +val_307 +val_307 +val_308 +val_309 +val_309 +val_309 +val_309 +val_310 +val_311 +val_311 +val_311 +val_311 +val_311 +val_311 +val_311 +val_311 +val_311 +val_315 +val_316 +val_316 +val_316 +val_316 +val_316 +val_316 +val_316 +val_316 +val_316 +val_317 +val_317 +val_317 +val_317 +val_318 +val_318 +val_318 +val_318 +val_318 +val_318 +val_318 +val_318 +val_318 +val_321 +val_321 +val_321 +val_321 +val_322 +val_322 +val_322 +val_322 +val_323 +val_325 +val_325 +val_325 +val_325 +val_327 +val_327 +val_327 +val_327 +val_327 +val_327 +val_327 +val_327 +val_327 +val_331 +val_331 +val_331 +val_331 +val_332 +val_333 +val_333 +val_333 +val_333 +val_335 +val_336 +val_338 +val_339 +val_341 +val_342 +val_342 +val_342 +val_342 +val_344 +val_344 +val_344 +val_344 +val_345 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_348 +val_351 +val_353 +val_353 +val_353 +val_353 +val_356 +val_360 +val_362 +val_364 +val_365 +val_366 +val_367 +val_367 +val_367 +val_367 +val_368 +val_369 +val_369 +val_369 +val_369 +val_369 +val_369 +val_369 +val_369 +val_369 +val_373 +val_374 +val_375 +val_377 +val_378 +val_379 +val_382 +val_382 +val_382 +val_382 +val_384 +val_384 +val_384 +val_384 +val_384 +val_384 +val_384 +val_384 +val_384 +val_386 +val_389 +val_392 +val_393 +val_394 +val_395 +val_395 +val_395 +val_395 +val_396 +val_396 +val_396 +val_396 +val_396 +val_396 +val_396 +val_396 +val_396 
+val_397 +val_397 +val_397 +val_397 +val_399 +val_399 +val_399 +val_399 +val_400 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_401 +val_402 +val_403 +val_403 +val_403 +val_403 +val_403 +val_403 +val_403 +val_403 +val_403 +val_404 +val_404 +val_404 +val_404 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_406 +val_407 +val_409 +val_409 +val_409 +val_409 +val_409 +val_409 +val_409 +val_409 +val_409 +val_411 +val_413 +val_413 +val_413 +val_413 +val_414 +val_414 +val_414 +val_414 +val_417 +val_417 +val_417 +val_417 +val_417 +val_417 +val_417 +val_417 +val_417 +val_418 +val_419 +val_421 +val_424 +val_424 +val_424 +val_424 +val_427 +val_429 +val_429 +val_429 +val_429 +val_430 +val_430 +val_430 +val_430 +val_430 +val_430 +val_430 +val_430 +val_430 +val_431 +val_431 +val_431 +val_431 +val_431 +val_431 +val_431 +val_431 +val_431 +val_432 +val_435 +val_436 +val_437 +val_438 +val_438 +val_438 +val_438 +val_438 +val_438 +val_438 +val_438 +val_438 +val_439 +val_439 +val_439 +val_439 +val_443 +val_444 +val_446 +val_448 +val_449 +val_452 +val_453 +val_454 +val_454 +val_454 +val_454 +val_454 +val_454 +val_454 +val_454 +val_454 +val_455 +val_457 +val_458 +val_458 +val_458 +val_458 +val_459 +val_459 +val_459 +val_459 +val_460 +val_462 +val_462 +val_462 +val_462 +val_463 +val_463 +val_463 +val_463 +val_466 +val_466 +val_466 +val_466 +val_466 +val_466 +val_466 +val_466 +val_466 +val_467 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_468 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_469 +val_470 +val_472 +val_475 +val_477 +val_478 +val_478 +val_478 +val_478 +val_479 +val_480 +val_480 +val_480 +val_480 +val_480 +val_480 +val_480 +val_480 +val_480 +val_481 +val_482 +val_483 +val_484 +val_485 +val_487 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_489 +val_490 +val_491 +val_492 +val_492 +val_492 +val_492 +val_493 +val_494 +val_495 +val_496 +val_497 +val_498 +val_498 +val_498 +val_498 +val_498 +val_498 +val_498 +val_498 +val_498 Index: ql/src/test/results/clientpositive/ppd_outer_join1.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_outer_join1.q.out (working copy) @@ -110,7 +110,7 @@ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-14_648_6362739881959261544/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-30-10_517_1449004673525932594/-mr-10000 POSTHOOK: query: FROM src a LEFT OUTER JOIN @@ -120,10 +120,139 @@ WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-14_648_6362739881959261544/-mr-10000 +POSTHOOK: Output: 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-30-10_517_1449004673525932594/-mr-10000 17 val_17 17 val_17 18 val_18 18 val_18 18 val_18 18 val_18 18 val_18 18 val_18 18 val_18 18 val_18 19 val_19 19 val_19 +PREHOOK: query: EXPLAIN + FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN + FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) 10) (< (. (TOK_TABLE_OR_COL a) key) 20)) (> (. (TOK_TABLE_OR_COL b) key) 15)) (< (. (TOK_TABLE_OR_COL b) key) 25))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + a + TableScan + alias: a + Filter Operator + predicate: + expr: ((key > 10) and (key < 20)) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col4, _col5 + Filter Operator + predicate: + expr: ((_col4 > 15) and (_col4 < 25)) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-30-20_062_1274880924259137309/-mr-10000 +POSTHOOK: query: FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > 10 AND a.key < 20 AND b.key > 15 AND b.key < 25 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-30-20_062_1274880924259137309/-mr-10000 +17 val_17 17 val_17 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +19 val_19 19 val_19 Index: ql/src/test/results/clientpositive/ppd_outer_join2.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_outer_join2.q.out (working copy) @@ -110,7 +110,7 @@ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-18_662_1007936007610334670/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-32-34_464_515670423172601583/-mr-10000 POSTHOOK: query: FROM src a RIGHT OUTER JOIN @@ -120,7 +120,7 @@ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-18_662_1007936007610334670/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-32-34_464_515670423172601583/-mr-10000 150 val_150 150 val_150 152 val_152 152 val_152 152 val_152 152 val_152 @@ -247,3 +247,252 @@ 199 val_199 199 val_199 199 val_199 199 val_199 2 val_2 2 val_2 +PREHOOK: query: EXPLAIN + FROM + src a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN + FROM + src a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) '10') (< (. (TOK_TABLE_OR_COL a) key) '20')) (> (. (TOK_TABLE_OR_COL b) key) '15')) (< (. 
(TOK_TABLE_OR_COL b) key) '25'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + b + TableScan + alias: b + Filter Operator + predicate: + expr: ((key > '15') and (key < '25')) + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + Reduce Operator Tree: + Join Operator + condition map: + Right Outer Join0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col4, _col5 + Filter Operator + predicate: + expr: ((_col0 > '10') and (_col0 < '20')) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: FROM + src a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-32-44_509_6575656481631571931/-mr-10000 +POSTHOOK: query: FROM + src a + RIGHT OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-32-44_509_6575656481631571931/-mr-10000 +150 val_150 150 val_150 +152 val_152 152 val_152 +152 val_152 152 val_152 +152 val_152 152 val_152 +152 val_152 152 val_152 +153 val_153 153 val_153 +155 val_155 155 val_155 +156 val_156 156 val_156 +157 val_157 157 val_157 +158 val_158 158 val_158 +160 val_160 160 val_160 +162 val_162 162 val_162 +163 val_163 163 val_163 +164 val_164 164 val_164 +164 val_164 164 val_164 +164 val_164 164 val_164 +164 val_164 164 val_164 +165 val_165 165 val_165 +165 val_165 165 val_165 +165 val_165 165 val_165 +165 val_165 165 val_165 +166 val_166 166 val_166 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +168 val_168 168 val_168 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +17 
val_17 17 val_17 +170 val_170 170 val_170 +172 val_172 172 val_172 +172 val_172 172 val_172 +172 val_172 172 val_172 +172 val_172 172 val_172 +174 val_174 174 val_174 +174 val_174 174 val_174 +174 val_174 174 val_174 +174 val_174 174 val_174 +175 val_175 175 val_175 +175 val_175 175 val_175 +175 val_175 175 val_175 +175 val_175 175 val_175 +176 val_176 176 val_176 +176 val_176 176 val_176 +176 val_176 176 val_176 +176 val_176 176 val_176 +177 val_177 177 val_177 +178 val_178 178 val_178 +179 val_179 179 val_179 +179 val_179 179 val_179 +179 val_179 179 val_179 +179 val_179 179 val_179 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +180 val_180 180 val_180 +181 val_181 181 val_181 +183 val_183 183 val_183 +186 val_186 186 val_186 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +189 val_189 189 val_189 +19 val_19 19 val_19 +190 val_190 190 val_190 +191 val_191 191 val_191 +191 val_191 191 val_191 +191 val_191 191 val_191 +191 val_191 191 val_191 +192 val_192 192 val_192 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +194 val_194 194 val_194 +195 val_195 195 val_195 +195 val_195 195 val_195 +195 val_195 195 val_195 +195 val_195 195 val_195 +196 val_196 196 val_196 +197 val_197 197 val_197 +197 val_197 197 val_197 +197 val_197 197 val_197 +197 val_197 197 val_197 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +2 val_2 2 val_2 Index: ql/src/test/results/clientpositive/ppd_outer_join3.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_outer_join3.q.out (working copy) @@ -106,7 +106,7 @@ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-22_895_2367881874317279593/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-35-23_324_1018503791576708396/-mr-10000 POSTHOOK: query: FROM src a FULL OUTER JOIN @@ -116,7 +116,7 @@ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-22_895_2367881874317279593/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-35-23_324_1018503791576708396/-mr-10000 150 val_150 150 val_150 152 val_152 152 val_152 152 val_152 152 val_152 @@ -243,3 +243,248 @@ 199 val_199 199 val_199 199 val_199 199 val_199 2 val_2 2 val_2 +PREHOOK: query: EXPLAIN + FROM + src a + FULL OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN + FROM + src a + FULL OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > 
'15' AND b.key < '25' +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_FULLOUTERJOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value))) (TOK_WHERE (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) '10') (< (. (TOK_TABLE_OR_COL a) key) '20')) (> (. (TOK_TABLE_OR_COL b) key) '15')) (< (. (TOK_TABLE_OR_COL b) key) '25'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + Reduce Operator Tree: + Join Operator + condition map: + Outer Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col4, _col5 + Filter Operator + predicate: + expr: (((_col4 > '15') and (_col4 < '25')) and ((_col0 > '10') and (_col0 < '20'))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: FROM + src a + FULL OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-35-35_138_6944101774390059493/-mr-10000 +POSTHOOK: query: FROM + src a + FULL OUTER JOIN + src b + ON (a.key = b.key) + SELECT a.key, a.value, b.key, b.value + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-35-35_138_6944101774390059493/-mr-10000 +150 val_150 150 val_150 +152 val_152 152 val_152 +152 val_152 152 val_152 +152 val_152 152 val_152 +152 val_152 152 val_152 +153 val_153 153 val_153 +155 val_155 155 val_155 +156 val_156 156 val_156 +157 val_157 157 val_157 +158 val_158 158 val_158 +160 val_160 160 val_160 +162 val_162 162 val_162 +163 val_163 163 val_163 +164 val_164 164 val_164 +164 val_164 164 val_164 +164 val_164 164 val_164 +164 val_164 164 val_164 +165 val_165 165 val_165 +165 val_165 165 val_165 +165 val_165 165 val_165 +165 val_165 165 val_165 +166 val_166 166 val_166 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 
val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +167 val_167 167 val_167 +168 val_168 168 val_168 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +169 val_169 169 val_169 +17 val_17 17 val_17 +170 val_170 170 val_170 +172 val_172 172 val_172 +172 val_172 172 val_172 +172 val_172 172 val_172 +172 val_172 172 val_172 +174 val_174 174 val_174 +174 val_174 174 val_174 +174 val_174 174 val_174 +174 val_174 174 val_174 +175 val_175 175 val_175 +175 val_175 175 val_175 +175 val_175 175 val_175 +175 val_175 175 val_175 +176 val_176 176 val_176 +176 val_176 176 val_176 +176 val_176 176 val_176 +176 val_176 176 val_176 +177 val_177 177 val_177 +178 val_178 178 val_178 +179 val_179 179 val_179 +179 val_179 179 val_179 +179 val_179 179 val_179 +179 val_179 179 val_179 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +18 val_18 18 val_18 +180 val_180 180 val_180 +181 val_181 181 val_181 +183 val_183 183 val_183 +186 val_186 186 val_186 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +187 val_187 187 val_187 +189 val_189 189 val_189 +19 val_19 19 val_19 +190 val_190 190 val_190 +191 val_191 191 val_191 +191 val_191 191 val_191 +191 val_191 191 val_191 +191 val_191 191 val_191 +192 val_192 192 val_192 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +193 val_193 193 val_193 +194 val_194 194 val_194 +195 val_195 195 val_195 +195 val_195 195 val_195 +195 val_195 195 val_195 +195 val_195 195 val_195 +196 val_196 196 val_196 +197 val_197 197 val_197 +197 val_197 197 val_197 +197 val_197 197 val_197 +197 val_197 197 val_197 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +199 val_199 199 val_199 +2 val_2 2 val_2 Index: ql/src/test/results/clientpositive/ppd_outer_join4.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_outer_join4.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_outer_join4.q.out (working copy) @@ -134,7 +134,7 @@ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-26_835_1439714224998684331/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-11_041_8830294243573092446/-mr-10000 POSTHOOK: query: FROM src a LEFT OUTER JOIN @@ -147,7 +147,7 @@ WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-26_835_1439714224998684331/-mr-10000 +POSTHOOK: Output: 
file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-11_041_8830294243573092446/-mr-10000 150 val_150 150 val_150 150 152 val_152 152 val_152 152 152 val_152 152 val_152 152 @@ -378,3 +378,383 @@ 199 val_199 199 val_199 199 199 val_199 199 val_199 199 2 val_2 2 val_2 2 +PREHOOK: query: EXPLAIN + FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + RIGHT OUTER JOIN + src c + ON (a.key = c.key) + SELECT a.key, a.value, b.key, b.value, c.key + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN + FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + RIGHT OUTER JOIN + src c + ON (a.key = c.key) + SELECT a.key, a.value, b.key, b.value, c.key + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_RIGHTOUTERJOIN (TOK_LEFTOUTERJOIN (TOK_TABREF (TOK_TABNAME src) a) (TOK_TABREF (TOK_TABNAME src) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key))) (TOK_TABREF (TOK_TABNAME src) c) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL c) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL c) key))) (TOK_WHERE (AND (AND (AND (AND (> (. (TOK_TABLE_OR_COL a) key) '10') (< (. (TOK_TABLE_OR_COL a) key) '20')) (> (. (TOK_TABLE_OR_COL b) key) '15')) (< (. (TOK_TABLE_OR_COL b) key) '25')) (<> (TOK_FUNCTION sqrt (. (TOK_TABLE_OR_COL c) key)) 13))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + a + TableScan + alias: a + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + c + TableScan + alias: c + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 2 + value expressions: + expr: key + type: string + Reduce Operator Tree: + Join Operator + condition map: + Left Outer Join0 to 1 + Right Outer Join0 to 2 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} + 1 {VALUE._col0} {VALUE._col1} + 2 {VALUE._col0} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col4, _col5, _col8 + Filter Operator + predicate: + expr: ((((_col4 > '15') and (_col4 < '25')) and (sqrt(_col8) <> 13)) and ((_col0 > '10') and (_col0 < '20'))) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col8 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + 
Fetch Operator + limit: -1 + + +PREHOOK: query: FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + RIGHT OUTER JOIN + src c + ON (a.key = c.key) + SELECT a.key, a.value, b.key, b.value, c.key + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-20_602_416360783321217123/-mr-10000 +POSTHOOK: query: FROM + src a + LEFT OUTER JOIN + src b + ON (a.key = b.key) + RIGHT OUTER JOIN + src c + ON (a.key = c.key) + SELECT a.key, a.value, b.key, b.value, c.key + WHERE a.key > '10' AND a.key < '20' AND b.key > '15' AND b.key < '25' AND sqrt(c.key) <> 13 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-38-20_602_416360783321217123/-mr-10000 +150 val_150 150 val_150 150 +152 val_152 152 val_152 152 +152 val_152 152 val_152 152 +152 val_152 152 val_152 152 +152 val_152 152 val_152 152 +152 val_152 152 val_152 152 +152 val_152 152 val_152 152 +152 val_152 152 val_152 152 +152 val_152 152 val_152 152 +153 val_153 153 val_153 153 +155 val_155 155 val_155 155 +156 val_156 156 val_156 156 +157 val_157 157 val_157 157 +158 val_158 158 val_158 158 +160 val_160 160 val_160 160 +162 val_162 162 val_162 162 +163 val_163 163 val_163 163 +164 val_164 164 val_164 164 +164 val_164 164 val_164 164 +164 val_164 164 val_164 164 +164 val_164 164 val_164 164 +164 val_164 164 val_164 164 +164 val_164 164 val_164 164 +164 val_164 164 val_164 164 +164 val_164 164 val_164 164 +165 val_165 165 val_165 165 +165 val_165 165 val_165 165 +165 val_165 165 val_165 165 +165 val_165 165 val_165 165 +165 val_165 165 val_165 165 +165 val_165 165 val_165 165 +165 val_165 165 val_165 165 +165 val_165 165 val_165 165 +166 val_166 166 val_166 166 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +167 val_167 167 val_167 167 +168 val_168 168 val_168 168 +17 val_17 17 val_17 17 +170 val_170 170 val_170 170 +172 val_172 172 val_172 172 +172 val_172 172 val_172 172 +172 val_172 172 val_172 172 +172 val_172 172 val_172 172 +172 val_172 172 val_172 172 +172 val_172 172 val_172 172 +172 val_172 172 val_172 172 +172 val_172 172 val_172 172 +174 val_174 174 val_174 174 +174 val_174 174 val_174 174 +174 val_174 174 val_174 174 +174 val_174 174 val_174 174 +174 val_174 174 val_174 174 +174 val_174 174 val_174 174 +174 val_174 174 val_174 174 +174 val_174 174 val_174 174 +175 val_175 175 val_175 175 +175 val_175 175 val_175 175 +175 val_175 175 val_175 175 +175 val_175 175 val_175 175 +175 val_175 175 val_175 175 +175 val_175 175 val_175 175 +175 val_175 175 val_175 175 +175 val_175 175 val_175 175 +176 val_176 176 val_176 176 +176 val_176 176 val_176 176 
+176 val_176 176 val_176 176 +176 val_176 176 val_176 176 +176 val_176 176 val_176 176 +176 val_176 176 val_176 176 +176 val_176 176 val_176 176 +176 val_176 176 val_176 176 +177 val_177 177 val_177 177 +178 val_178 178 val_178 178 +179 val_179 179 val_179 179 +179 val_179 179 val_179 179 +179 val_179 179 val_179 179 +179 val_179 179 val_179 179 +179 val_179 179 val_179 179 +179 val_179 179 val_179 179 +179 val_179 179 val_179 179 +179 val_179 179 val_179 179 +18 val_18 18 val_18 18 +18 val_18 18 val_18 18 +18 val_18 18 val_18 18 +18 val_18 18 val_18 18 +18 val_18 18 val_18 18 +18 val_18 18 val_18 18 +18 val_18 18 val_18 18 +18 val_18 18 val_18 18 +180 val_180 180 val_180 180 +181 val_181 181 val_181 181 +183 val_183 183 val_183 183 +186 val_186 186 val_186 186 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +187 val_187 187 val_187 187 +189 val_189 189 val_189 189 +19 val_19 19 val_19 19 +190 val_190 190 val_190 190 +191 val_191 191 val_191 191 +191 val_191 191 val_191 191 +191 val_191 191 val_191 191 +191 val_191 191 val_191 191 +191 val_191 191 val_191 191 +191 val_191 191 val_191 191 +191 val_191 191 val_191 191 +191 val_191 191 val_191 191 +192 val_192 192 val_192 192 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +193 val_193 193 val_193 193 +194 val_194 194 val_194 194 +195 val_195 195 val_195 195 +195 val_195 195 val_195 195 +195 val_195 195 val_195 195 +195 val_195 195 val_195 195 +195 val_195 195 val_195 195 +195 val_195 195 val_195 195 +195 val_195 195 val_195 195 +195 val_195 195 val_195 195 +196 val_196 196 val_196 196 +197 val_197 197 val_197 197 +197 val_197 197 val_197 197 +197 val_197 197 val_197 197 +197 val_197 197 val_197 197 +197 val_197 197 val_197 197 +197 val_197 197 val_197 197 +197 val_197 197 val_197 197 +197 val_197 197 val_197 197 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 
+199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +199 val_199 199 val_199 199 +2 val_2 2 val_2 2 Index: ql/src/test/results/clientpositive/ppd_random.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_random.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_random.q.out (working copy) @@ -109,3 +109,110 @@ limit: -1 +PREHOOK: query: EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 +WHERE rand() > 0.5 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT src1.c1, src2.c4 +FROM +(SELECT src.key as c1, src.value as c2 from src ) src1 +JOIN +(SELECT src.key as c3, src.value as c4 from src where src.key > '2' ) src2 +ON src1.c1 = src2.c3 +WHERE rand() > 0.5 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c1) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c2)))) src1) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key) c3) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value) c4)) (TOK_WHERE (> (. (TOK_TABLE_OR_COL src) key) '2')))) src2) (= (. (TOK_TABLE_OR_COL src1) c1) (. (TOK_TABLE_OR_COL src2) c3)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src1) c1)) (TOK_SELEXPR (. 
(TOK_TABLE_OR_COL src2) c4))) (TOK_WHERE (> (TOK_FUNCTION rand) 0.5)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + src1:src + TableScan + alias: src + Select Operator + expressions: + expr: key + type: string + outputColumnNames: _col0 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string + src2:src + TableScan + alias: src + Filter Operator + predicate: + expr: (key > '2') + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 1 + value expressions: + expr: _col1 + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE._col0} + 1 {VALUE._col1} + handleSkewJoin: false + outputColumnNames: _col0, _col3 + Filter Operator + predicate: + expr: (rand() > 0.5) + type: boolean + Select Operator + expressions: + expr: _col0 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + Index: ql/src/test/results/clientpositive/ppd_transform.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_transform.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_transform.q.out (working copy) @@ -90,7 +90,7 @@ SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-31_072_8845907404314698533/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-50-56_212_3129782761910965240/-mr-10000 POSTHOOK: query: FROM ( FROM src SELECT TRANSFORM(src.key, src.value) @@ -100,7 +100,7 @@ SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-31_072_8845907404314698533/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-50-56_212_3129782761910965240/-mr-10000 0 val_0 0 val_0 0 val_0 @@ -185,3 +185,190 @@ 97 val_97 98 val_98 98 val_98 +PREHOOK: query: EXPLAIN +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TRANSFORM (TOK_EXPLIST (. (TOK_TABLE_OR_COL src) key) (. 
(TOK_TABLE_OR_COL src) value)) TOK_SERDE TOK_RECORDWRITER '/bin/cat' TOK_SERDE TOK_RECORDREADER (TOK_ALIASLIST tkey tvalue)))) (TOK_CLUSTERBY (TOK_TABLE_OR_COL tkey)))) tmap)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tkey)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL tmap) tvalue))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL tmap) tkey) 100)))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + tmap:src + TableScan + alias: src + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Filter Operator + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + Reduce Operator Tree: + Extract + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-51-05_287_665431322083364408/-mr-10000 +POSTHOOK: query: FROM ( + FROM src + SELECT TRANSFORM(src.key, src.value) + USING '/bin/cat' AS (tkey, tvalue) + CLUSTER BY tkey +) tmap +SELECT tmap.tkey, tmap.tvalue WHERE tmap.tkey < 100 +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-51-05_287_665431322083364408/-mr-10000 +0 val_0 +0 val_0 +0 val_0 +10 val_10 +11 val_11 +12 val_12 +12 val_12 +15 val_15 +15 val_15 +17 val_17 +18 val_18 +18 val_18 +19 val_19 +2 val_2 +20 val_20 +24 val_24 +24 val_24 +26 val_26 +26 val_26 +27 val_27 +28 val_28 +30 val_30 +33 val_33 +34 val_34 +35 val_35 +35 val_35 +35 val_35 +37 val_37 +37 val_37 +4 val_4 +41 val_41 +42 val_42 +42 val_42 +43 val_43 +44 val_44 +47 val_47 +5 val_5 +5 val_5 +5 val_5 +51 val_51 +51 val_51 +53 val_53 +54 val_54 +57 val_57 +58 val_58 +58 val_58 +64 val_64 +65 val_65 +66 val_66 +67 val_67 +67 val_67 +69 val_69 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +74 val_74 +76 val_76 +76 val_76 +77 val_77 +78 val_78 +8 val_8 +80 val_80 +82 val_82 +83 val_83 +83 val_83 +84 val_84 +84 val_84 +85 val_85 +86 val_86 +87 val_87 +9 val_9 +90 val_90 +90 val_90 +90 val_90 +92 val_92 +95 val_95 +95 val_95 +96 val_96 +97 val_97 +97 val_97 +98 val_98 +98 val_98 Index: ql/src/test/results/clientpositive/ppd_udf_case.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_udf_case.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_udf_case.q.out 
(working copy) @@ -124,7 +124,7 @@ Stage: Stage-2 Map Reduce Alias -> Map Operator Tree: - file:/tmp/sdong/hive_2011-02-10_16-57-34_795_6886645259044041600/-mr-10002 + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-19_011_6078693060764998539/-mr-10002 Reduce Output Operator key expressions: expr: _col0 @@ -190,7 +190,7 @@ PREHOOK: type: QUERY PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-34_974_8461060314707067318/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-20_085_7457265695938103038/-mr-10000 POSTHOOK: query: SELECT * FROM srcpart a JOIN srcpart b ON a.key = b.key @@ -205,8 +205,216 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-34_974_8461060314707067318/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-20_085_7457265695938103038/-mr-10000 27 val_27 2008-04-08 11 27 val_27 2008-04-08 11 27 val_27 2008-04-08 11 27 val_27 2008-04-08 12 27 val_27 2008-04-08 12 27 val_27 2008-04-08 11 27 val_27 2008-04-08 12 27 val_27 2008-04-08 12 +PREHOOK: query: EXPLAIN +SELECT * +FROM srcpart a JOIN srcpart b +ON a.key = b.key +WHERE a.ds = '2008-04-08' AND + b.ds = '2008-04-08' AND + CASE a.key + WHEN '27' THEN TRUE + WHEN '38' THEN FALSE + ELSE NULL + END +ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +SELECT * +FROM srcpart a JOIN srcpart b +ON a.key = b.key +WHERE a.ds = '2008-04-08' AND + b.ds = '2008-04-08' AND + CASE a.key + WHEN '27' THEN TRUE + WHEN '38' THEN FALSE + ELSE NULL + END +ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME srcpart) a) (TOK_TABREF (TOK_TABNAME srcpart) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (AND (AND (= (. (TOK_TABLE_OR_COL a) ds) '2008-04-08') (= (. (TOK_TABLE_OR_COL b) ds) '2008-04-08')) (TOK_FUNCTION CASE (. (TOK_TABLE_OR_COL a) key) '27' TRUE '38' FALSE TOK_NULL))) (TOK_ORDERBY (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL a) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL a) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL a) ds)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL a) hr)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL b) key)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL b) value)) (TOK_TABSORTCOLNAMEASC (. (TOK_TABLE_OR_COL b) ds)) (TOK_TABSORTCOLNAMEASC (. 
(TOK_TABLE_OR_COL b) hr))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-2 depends on stages: Stage-1 + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + a + TableScan + alias: a + Filter Operator + predicate: + expr: CASE (key) WHEN ('27') THEN (true) WHEN ('38') THEN (false) ELSE (null) END + type: boolean + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + b + TableScan + alias: b + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + Reduce Operator Tree: + Join Operator + condition map: + Inner Join 0 to 1 + condition expressions: + 0 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} + 1 {VALUE._col0} {VALUE._col1} {VALUE._col2} {VALUE._col3} + handleSkewJoin: false + outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7, _col8, _col9 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + expr: _col8 + type: string + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + Stage: Stage-2 + Map Reduce + Alias -> Map Operator Tree: + file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-40_322_7654615073140246084/-mr-10002 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + sort order: ++++++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + expr: _col4 + type: string + expr: _col5 + type: string + expr: _col6 + type: string + expr: _col7 + type: string + Reduce Operator Tree: + Extract + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: SELECT * +FROM srcpart a JOIN srcpart b +ON a.key = b.key +WHERE a.ds = '2008-04-08' AND + b.ds = '2008-04-08' AND + CASE a.key + WHEN '27' THEN TRUE + WHEN '38' THEN FALSE + ELSE NULL + END +ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr +PREHOOK: type: QUERY +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-40_840_3692167909003016050/-mr-10000 +POSTHOOK: query: SELECT * +FROM srcpart a JOIN srcpart b +ON a.key = b.key +WHERE a.ds = '2008-04-08' AND + b.ds = '2008-04-08' 
AND + CASE a.key + WHEN '27' THEN TRUE + WHEN '38' THEN FALSE + ELSE NULL + END +ORDER BY a.key, a.value, a.ds, a.hr, b.key, b.value, b.ds, b.hr +POSTHOOK: type: QUERY +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-53-40_840_3692167909003016050/-mr-10000 +27 val_27 2008-04-08 11 27 val_27 2008-04-08 11 +27 val_27 2008-04-08 11 27 val_27 2008-04-08 12 +27 val_27 2008-04-08 12 27 val_27 2008-04-08 11 +27 val_27 2008-04-08 12 27 val_27 2008-04-08 12 Index: ql/src/test/results/clientpositive/ppd_union.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_union.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppd_union.q.out (working copy) @@ -114,7 +114,7 @@ WHERE key > '4' and value > 'val_4' PREHOOK: type: QUERY PREHOOK: Input: default@src -PREHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-42_422_7786150548202984482/-mr-10000 +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-56-10_639_4479616588694636353/-mr-10000 POSTHOOK: query: FROM ( FROM src select src.key, src.value WHERE src.key < '100' UNION ALL @@ -124,7 +124,7 @@ WHERE key > '4' and value > 'val_4' POSTHOOK: type: QUERY POSTHOOK: Input: default@src -POSTHOOK: Output: file:/tmp/sdong/hive_2011-02-10_16-57-42_422_7786150548202984482/-mr-10000 +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-56-10_639_4479616588694636353/-mr-10000 86 val_86 409 val_409 98 val_98 @@ -296,3 +296,285 @@ 403 val_403 400 val_400 97 val_97 +PREHOOK: query: EXPLAIN +FROM ( + FROM src select src.key, src.value WHERE src.key < '100' + UNION ALL + FROM src SELECT src.* WHERE src.key > '150' +) unioned_query +SELECT unioned_query.* + WHERE key > '4' and value > 'val_4' +PREHOOK: type: QUERY +POSTHOOK: query: EXPLAIN +FROM ( + FROM src select src.key, src.value WHERE src.key < '100' + UNION ALL + FROM src SELECT src.* WHERE src.key > '150' +) unioned_query +SELECT unioned_query.* + WHERE key > '4' and value > 'val_4' +POSTHOOK: type: QUERY +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_UNION (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key)) (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) value))) (TOK_WHERE (< (. (TOK_TABLE_OR_COL src) key) '100')))) (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME src))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME src)))) (TOK_WHERE (> (. 
(TOK_TABLE_OR_COL src) key) '150'))))) unioned_query)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_ALLCOLREF (TOK_TABNAME unioned_query)))) (TOK_WHERE (and (> (TOK_TABLE_OR_COL key) '4') (> (TOK_TABLE_OR_COL value) 'val_4'))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + null-subquery1:unioned_query-subquery1:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((key < '100') and ((key > '4') and (value > 'val_4'))) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + null-subquery2:unioned_query-subquery2:src + TableScan + alias: src + Filter Operator + predicate: + expr: ((key > '150') and ((key > '4') and (value > 'val_4'))) + type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: FROM ( + FROM src select src.key, src.value WHERE src.key < '100' + UNION ALL + FROM src SELECT src.* WHERE src.key > '150' +) unioned_query +SELECT unioned_query.* + WHERE key > '4' and value > 'val_4' +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-56-19_201_5931398035634081896/-mr-10000 +POSTHOOK: query: FROM ( + FROM src select src.key, src.value WHERE src.key < '100' + UNION ALL + FROM src SELECT src.* WHERE src.key > '150' +) unioned_query +SELECT unioned_query.* + WHERE key > '4' and value > 'val_4' +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-22_02-56-19_201_5931398035634081896/-mr-10000 +86 val_86 +409 val_409 +98 val_98 +484 val_484 +401 val_401 +66 val_66 +406 val_406 +429 val_429 +469 val_469 +495 val_495 +82 val_82 +403 val_403 +417 val_417 +430 val_430 +446 val_446 +459 val_459 +482 val_482 +413 val_413 +494 val_494 +466 val_466 +417 val_417 +489 val_489 +439 val_439 +475 val_475 +455 val_455 +57 val_57 +438 val_438 +489 val_489 +92 val_92 +47 val_47 +72 val_72 +427 val_427 +498 val_498 +437 val_437 +469 val_469 +54 val_54 +459 val_459 +51 val_51 +430 val_430 +65 val_65 +83 val_83 +67 val_67 +404 val_404 +489 val_489 +84 val_84 +466 val_466 +58 val_58 +8 val_8 +411 val_411 +463 val_463 +431 val_431 +42 val_42 +496 val_496 +468 val_468 +454 val_454 +418 val_418 +96 val_96 +51 val_51 +404 val_404 +43 val_43 +436 val_436 +469 val_469 +468 val_468 +95 val_95 +481 val_481 +457 val_457 +98 val_98 +409 val_409 +470 val_470 +413 val_413 +85 val_85 +77 val_77 +490 val_490 +87 val_87 +419 val_419 +72 val_72 +90 val_90 +435 val_435 +401 val_401 +452 
val_452 +5 val_5 +497 val_497 +402 val_402 +58 val_58 +95 val_95 +472 val_472 +498 val_498 +42 val_42 +430 val_430 +489 val_489 +458 val_458 +78 val_78 +76 val_76 +41 val_41 +492 val_492 +449 val_449 +453 val_453 +64 val_64 +468 val_468 +76 val_76 +74 val_74 +69 val_69 +485 val_485 +70 val_70 +487 val_487 +480 val_480 +401 val_401 +5 val_5 +438 val_438 +467 val_467 +432 val_432 +469 val_469 +463 val_463 +80 val_80 +44 val_44 +466 val_466 +403 val_403 +483 val_483 +53 val_53 +406 val_406 +409 val_409 +406 val_406 +401 val_401 +90 val_90 +424 val_424 +431 val_431 +454 val_454 +478 val_478 +431 val_431 +424 val_424 +5 val_5 +70 val_70 +480 val_480 +70 val_70 +438 val_438 +414 val_414 +491 val_491 +439 val_439 +479 val_479 +417 val_417 +444 val_444 +429 val_429 +443 val_443 +478 val_478 +468 val_468 +493 val_493 +460 val_460 +480 val_480 +83 val_83 +462 val_462 +406 val_406 +454 val_454 +401 val_401 +421 val_421 +407 val_407 +67 val_67 +462 val_462 +492 val_492 +9 val_9 +498 val_498 +458 val_458 +97 val_97 +469 val_469 +84 val_84 +448 val_448 +414 val_414 +477 val_477 +90 val_90 +403 val_403 +400 val_400 +97 val_97 Index: ql/src/test/results/clientpositive/ppr_pushdown3.q.out =================================================================== --- ql/src/test/results/clientpositive/ppr_pushdown3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/ppr_pushdown3.q.out (working copy) @@ -20,27 +20,23 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/query_result_fileformat.q.out =================================================================== --- ql/src/test/results/clientpositive/query_result_fileformat.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/query_result_fileformat.q.out (working copy) @@ -56,23 +56,19 @@ predicate: expr: (key = 'key1') type: boolean - Filter Operator - predicate: - expr: (key = 'key1') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -136,23 +132,19 @@ predicate: expr: (key = 'key1') type: boolean 
- Filter Operator - predicate: - expr: (key = 'key1') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/quote1.q.out =================================================================== --- ql/src/test/results/clientpositive/quote1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/quote1.q.out (working copy) @@ -33,32 +33,28 @@ predicate: expr: ((key >= 200) and (key < 300)) type: boolean - Filter Operator - predicate: - expr: ((key >= 200) and (key < 300)) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value - type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator @@ -122,25 +118,21 @@ predicate: expr: (table = '2008-04-08') type: boolean - Filter Operator - predicate: - expr: (table = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: location - type: int - expr: type - type: string - expr: table - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: location + type: int + expr: type + type: string + expr: table + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy) @@ -143,38 +143,33 @@ predicate: expr: (not ((key > 50) or (key < 10))) type: boolean - Filter Operator 
- isSamplingPred: false - predicate: - expr: (((ds = '2008-04-08') and (not ((key > 50) or (key < 10)))) and (hr like '%2')) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_16-59-31_097_6104330106842070340/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_16-59-31_097_6104330106842070340/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3 - columns.types string:string:string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-41-58_245_7085869776801424539/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-41-58_245_7085869776801424539/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3 + columns.types string:string:string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 [a] Index: ql/src/test/results/clientpositive/rcfile_null_value.q.out =================================================================== --- ql/src/test/results/clientpositive/rcfile_null_value.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/rcfile_null_value.q.out (working copy) @@ -106,31 +106,27 @@ predicate: expr: ((key > 10) and (key < 20)) type: boolean - Filter Operator - predicate: - expr: ((key > 10) and (key < 20)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 0 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string c:b:src2 TableScan alias: src2 @@ -138,31 +134,27 @@ predicate: expr: ((key > 15) and (key < 25)) type: boolean - Filter Operator - predicate: - expr: ((key > 15) and (key < 25)) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - expr: 
value + sort order: + + Map-reduce partition columns: + expr: _col0 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + tag: 1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/reduce_deduplicate.q.out =================================================================== --- ql/src/test/results/clientpositive/reduce_deduplicate.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/reduce_deduplicate.q.out (working copy) @@ -258,52 +258,47 @@ predicate: expr: (ds = '2010-03-29') type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2010-03-29') - type: boolean - Select Operator - expressions: - expr: aet - type: string - expr: aes - type: string - outputColumnNames: _col0, _col1 - Transform Operator - command: cat - output info: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2,_col3,_col4,_col5,_col6 - columns.types string,string,int,string,bigint,string,string - field.delim 9 - serialization.format 9 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: int - expr: _col3 - type: string - expr: _col4 - type: bigint - expr: _col5 - type: string - expr: _col6 - type: string + Select Operator + expressions: + expr: aet + type: string + expr: aes + type: string + outputColumnNames: _col0, _col1 + Transform Operator + command: cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2,_col3,_col4,_col5,_col6 + columns.types string,string,int,string,bigint,string,string + field.delim 9 + serialization.format 9 + Reduce Output Operator + key expressions: + expr: _col1 + type: string + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: int + expr: _col3 + type: string + expr: _col4 + type: bigint + expr: _col5 + type: string + expr: _col6 + type: string Needs Tagging: false Reduce Operator Tree: Extract Index: ql/src/test/results/clientpositive/regex_col.q.out =================================================================== --- ql/src/test/results/clientpositive/regex_col.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/regex_col.q.out (working copy) @@ -216,27 +216,23 @@ predicate: expr: (key = 103) type: boolean - Filter Operator - predicate: - expr: (key = 103) - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - expr: hr - type: string - expr: ds - type: string - sort order: +++ - Map-reduce partition columns: - expr: key - type: string - expr: hr - type: string - expr: ds - type: string - tag: 0 + Reduce Output Operator + key expressions: + expr: key + type: string + expr: hr + type: string + expr: ds + type: string + sort order: +++ + Map-reduce partition 
columns: + expr: key + type: string + expr: hr + type: string + expr: ds + type: string + tag: 0 b TableScan alias: b Index: ql/src/test/results/clientpositive/regexp_extract.q.out =================================================================== --- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy) @@ -53,20 +53,25 @@ field.delim 9 serialization.format 9 serialization.last.column.takes.rest true - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + isSamplingPred: false + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [tmap:src] @@ -109,34 +114,29 @@ name: default.src Reduce Operator Tree: Extract - Filter Operator - isSamplingPred: false - predicate: - expr: (_col0 < 100) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: regexp_extract(_col1, 'val_(\d+\t\d+)', 1) - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-01-39_424_6724088978945525837/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-01-39_424_6724088978945525837/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: regexp_extract(_col1, 'val_(\d+\t\d+)', 1) + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-02_223_1393307901149024596/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-02_223_1393307901149024596/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator @@ -302,20 +302,25 @@ field.delim 9 serialization.format 9 serialization.last.column.takes.rest true - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string + Filter Operator + isSamplingPred: false + predicate: + expr: (_col0 < 100) + type: boolean + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + 
tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src [tmap:src] @@ -358,34 +363,29 @@ name: default.src Reduce Operator Tree: Extract - Filter Operator - isSamplingPred: false - predicate: - expr: (_col0 < 100) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: regexp_extract(_col1, 'val_(\d+\t\d+)') - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-01-43_012_4690012969555367930/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-01-43_012_4690012969555367930/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: string + expr: regexp_extract(_col1, 'val_(\d+\t\d+)') + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-14_408_4797588639767330030/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/var/folders/uc/ucuNeMAVGQGzy3459D8z2+++Z0Q/-Tmp-/amarsri/hive_2011-03-17_11-47-14_408_4797588639767330030/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/router_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy) @@ -293,7 +293,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) + expr: ((_col0 > 10) and (_col0 < 20)) type: boolean Select Operator expressions: @@ -401,25 +401,20 @@ TableScan alias: a GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Reduce Output Operator - key expressions: - expr: key - type: string - sort order: + - Map-reduce partition columns: - expr: key - type: string - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + key expressions: + expr: key + type: string + sort order: + + Map-reduce partition columns: + expr: key + type: string + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string b TableScan alias: b @@ -579,7 +574,7 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((_col0 > 10) and (_col0 < 20)) and (_col6 > 15)) and (_col6 < 25)) + expr: ((_col0 > 10) and (_col0 < 20)) type: boolean Select Operator expressions: @@ -858,7 +853,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col4 > 15)) and (_col4 < 25)) and (_col6 = '2008-04-08')) + expr: 
((_col0 > 10) and (_col0 < 20)) type: boolean Select Operator expressions: @@ -1221,7 +1216,7 @@ Filter Operator isSamplingPred: false predicate: - expr: (((((_col0 > 10) and (_col0 < 20)) and (_col6 > 15)) and (_col6 < 25)) and (_col2 = '2008-04-08')) + expr: (((_col0 > 10) and (_col0 < 20)) and (_col2 = '2008-04-08')) type: boolean Select Operator expressions: Index: ql/src/test/results/clientpositive/sample1.q.out =================================================================== --- ql/src/test/results/clientpositive/sample1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample1.q.out (working copy) @@ -39,59 +39,54 @@ predicate: expr: (((hash(rand()) & 2147483647) % 1) = 0) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '2008-04-08') and (hr = '11')) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 Select Operator expressions: - expr: key + expr: UDFToInteger(_col0) + type: int + expr: _col1 type: string - expr: value + expr: _col2 type: string - expr: ds + expr: _col3 type: string - expr: hr - type: string outputColumnNames: _col0, _col1, _col2, _col3 - Select Operator - expressions: - expr: UDFToInteger(_col0) - type: int - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-53-28_734_1650557364525480631/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-53-28_734_1650557364525480631/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value,dt,hr - columns.types int:string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268008 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-22_086_7502476312093213805/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-22_086_7502476312093213805/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value,dt,hr + columns.types int:string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 + name 
default.dest1 + serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310378242 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [s] Index: ql/src/test/results/clientpositive/sample10.q.out =================================================================== --- ql/src/test/results/clientpositive/sample10.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample10.q.out (working copy) @@ -62,56 +62,46 @@ alias: srcpartbucket GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 0) - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: - expr: ds + Select Operator + expressions: + expr: ds + type: string + outputColumnNames: ds + Group By Operator + aggregations: + expr: count(1) + bucketGroup: false + keys: + expr: ds + type: string + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: string - outputColumnNames: ds - Group By Operator - aggregations: - expr: count(1) - bucketGroup: false - keys: - expr: ds - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: -1 - value expressions: - expr: _col1 - type: bigint + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: -1 + value expressions: + expr: _col1 + type: bigint Needs Tagging: false Path -> Alias: - pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11/000000_0 [srcpartbucket] - pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12/000000_0 [srcpartbucket] - pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11/000000_0 [srcpartbucket] - pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12/000000_0 [srcpartbucket] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11 [srcpartbucket] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12 [srcpartbucket] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11 [srcpartbucket] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12 [srcpartbucket] Path -> Partition: - pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11/000000_0 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11 Partition - base file name: 000000_0 + base file name: hr=11 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat partition values: @@ -124,7 +114,7 @@ columns.types string:string 
file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=11 name default.srcpartbucket numFiles 4 numPartitions 4 @@ -135,7 +125,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 687 - transient_lastDdlTime 1306985008 + transient_lastDdlTime 1310378253 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -147,7 +137,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket name default.srcpartbucket numFiles 16 numPartitions 4 @@ -158,13 +148,13 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1306985009 + transient_lastDdlTime 1310378254 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket - pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12/000000_0 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12 Partition - base file name: 000000_0 + base file name: hr=12 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat partition values: @@ -177,7 +167,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-08/hr=12 name default.srcpartbucket numFiles 4 numPartitions 4 @@ -188,7 +178,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 687 - transient_lastDdlTime 1306985008 + transient_lastDdlTime 1310378253 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -200,7 +190,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket name default.srcpartbucket numFiles 16 numPartitions 4 @@ -211,13 +201,13 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1306985009 + transient_lastDdlTime 1310378254 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket - pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11/000000_0 + 
pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11 Partition - base file name: 000000_0 + base file name: hr=11 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat partition values: @@ -230,7 +220,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=11 name default.srcpartbucket numFiles 4 numPartitions 4 @@ -241,7 +231,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 687 - transient_lastDdlTime 1306985009 + transient_lastDdlTime 1310378254 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -253,7 +243,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket name default.srcpartbucket numFiles 16 numPartitions 4 @@ -264,13 +254,13 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 1306985009 + transient_lastDdlTime 1310378254 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket - pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12/000000_0 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12 Partition - base file name: 000000_0 + base file name: hr=12 input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat partition values: @@ -283,7 +273,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket/ds=2008-04-09/hr=12 name default.srcpartbucket numFiles 4 numPartitions 4 @@ -294,7 +284,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 687 - transient_lastDdlTime 1306985009 + transient_lastDdlTime 1310378254 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat @@ -306,7 +296,7 @@ columns.types string:string file.inputformat org.apache.hadoop.hive.ql.io.RCFileInputFormat file.outputformat org.apache.hadoop.hive.ql.io.RCFileOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/srcpartbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpartbucket name default.srcpartbucket numFiles 16 numPartitions 4 @@ -317,7 +307,7 @@ serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe totalSize 2748 - transient_lastDdlTime 
1306985009 + transient_lastDdlTime 1310378254 serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket name: default.srcpartbucket @@ -341,9 +331,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/tomasz/hive_2011-06-01_20-23-29_444_7696526641405452058/-ext-10001 + directory: file:/tmp/amarsri/hive_2011-07-11_02-57-34_199_3121154239583559117/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/tomasz/hive_2011-06-01_20-23-29_444_7696526641405452058/-ext-10001/ + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-57-34_199_3121154239583559117/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -366,14 +356,14 @@ PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/tmp/tomasz/hive_2011-06-01_20-23-30_508_5406045561869452297/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-34_266_8959693938921606201/-mr-10000 POSTHOOK: query: select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 4 on key) where ds is not null group by ds POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/tmp/tomasz/hive_2011-06-01_20-23-30_508_5406045561869452297/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-34_266_8959693938921606201/-mr-10000 POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -390,14 +380,14 @@ PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/tmp/tomasz/hive_2011-06-01_20-23-35_137_2751107101196563632/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-37_721_8565979138240547969/-mr-10000 POSTHOOK: query: select ds, count(1) from srcpartbucket tablesample (bucket 1 out of 2 on key) where ds is not null group by ds POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/tmp/tomasz/hive_2011-06-01_20-23-35_137_2751107101196563632/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-37_721_8565979138240547969/-mr-10000 POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=12).key SIMPLE 
[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] @@ -414,14 +404,14 @@ PREHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 PREHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -PREHOOK: Output: file:/tmp/tomasz/hive_2011-06-01_20-23-41_104_4777815480864595260/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-41_141_6108039306390462068/-mr-10000 POSTHOOK: query: select * from srcpartbucket where ds is not null POSTHOOK: type: QUERY POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-08/hr=12 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=11 POSTHOOK: Input: default@srcpartbucket@ds=2008-04-09/hr=12 -POSTHOOK: Output: file:/tmp/tomasz/hive_2011-06-01_20-23-41_104_4777815480864595260/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-41_141_6108039306390462068/-mr-10000 POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: srcpartbucket PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- ql/src/test/results/clientpositive/sample2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -35,55 +35,50 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268044 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: 
pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310378262 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -93,12 +88,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,12 +105,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -127,14 +122,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10002 - destination: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10000 + source: 
pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10002 + destination: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10000 + source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -144,28 +139,28 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268044 + transient_lastDdlTime 1310378262 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 - tmp directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10001 + tmp directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10000 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -176,12 +171,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268044 + transient_lastDdlTime 1310378262 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 TotalFiles: 1 @@ -189,9 +184,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10002 [pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10002] + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10002 [pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10002] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-04_767_3073506034460854405/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-42_556_3775563263647495718/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -202,12 +197,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268044 + transient_lastDdlTime 1310378262 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -218,12 +213,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268044 + transient_lastDdlTime 1310378262 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 name: default.dest1 @@ -244,11 +239,11 @@ PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-09_653_6287363092772761622/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-46_140_4116462247752325624/-mr-10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-09_653_6287363092772761622/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-46_140_4116462247752325624/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 474 val_475 Index: ql/src/test/results/clientpositive/sample3.q.out =================================================================== --- ql/src/test/results/clientpositive/sample3.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample3.q.out (working copy) @@ -26,21 +26,17 @@ predicate: expr: (((hash(key) & 2147483647) % 5) = 0) type: boolean - Filter Operator - predicate: - expr: (((hash(key) & 2147483647) % 5) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - 
outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: int + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/sample4.q.out =================================================================== --- ql/src/test/results/clientpositive/sample4.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample4.q.out (working copy) @@ -35,55 +35,50 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268053 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310378269 + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -93,12 +88,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,12 +105,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -127,14 +122,14 @@ Move Operator files: hdfs directory: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10002 - destination: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10000 + source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10002 + destination: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10000 + source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -144,28 +139,28 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location 
pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268053 + transient_lastDdlTime 1310378269 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 - tmp directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10001 + tmp directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10000 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -176,12 +171,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268053 + transient_lastDdlTime 1310378269 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 TotalFiles: 1 @@ -189,9 +184,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10002 [pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10002] + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10002 [pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10002] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-14_006_7513722082543864748/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-49_352_3228233042050354215/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -202,12 +197,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268053 + transient_lastDdlTime 1310378269 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -218,12 +213,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268053 + transient_lastDdlTime 1310378269 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 name: default.dest1 @@ -244,11 +239,11 @@ PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-18_801_912578276922482779/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-52_947_159525324700836003/-mr-10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-18_801_912578276922482779/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-57-52_947_159525324700836003/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 474 val_475 Index: ql/src/test/results/clientpositive/sample5.q.out =================================================================== --- ql/src/test/results/clientpositive/sample5.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample5.q.out (working copy) @@ -33,48 +33,43 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 5) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 5) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-19_492_6757110521675833834/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-19_492_6757110521675833834/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - 
serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268059 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-53_347_8409652609921946176/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-57-53_347_8409652609921946176/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310378273 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket [s] Index: ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- ql/src/test/results/clientpositive/sample6.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -33,55 +33,50 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 0) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268067 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - 
GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310378280 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -91,12 +86,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -108,12 +103,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -125,14 +120,14 @@ Move Operator files: hdfs directory: true - source: 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10002 - destination: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10000 + source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10002 + destination: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10000 Stage: Stage-0 Move Operator tables: replace: true - source: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10000 + source: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10000 table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -142,28 +137,28 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268067 + transient_lastDdlTime 1310378280 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 - tmp directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10001 + tmp directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10001 Stage: Stage-2 Stats-Aggr Operator - Stats Aggregation Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10000/ + Stats Aggregation Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10000/ Stage: Stage-3 Map Reduce Alias -> Map Operator Tree: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10002 File Output Operator compressed: false GlobalTableId: 0 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10000 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10000 NumFilesPerFileSink: 1 table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -174,12 +169,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268067 + transient_lastDdlTime 1310378280 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 TotalFiles: 1 @@ -187,9 +182,9 @@ MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10002 [pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10002] + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10002 [pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10002] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-27_627_3309354126941173289/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-00_018_458728813403565509/-ext-10002 Partition base file name: -ext-10002 input format: org.apache.hadoop.mapred.TextInputFormat @@ -200,12 +195,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268067 + transient_lastDdlTime 1310378280 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -216,12 +211,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 name default.dest1 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268067 + transient_lastDdlTime 1310378280 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 name: default.dest1 @@ -242,11 +237,11 @@ PREHOOK: query: SELECT dest1.* FROM dest1 PREHOOK: type: QUERY PREHOOK: Input: default@dest1 -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-33_389_7760677817749293223/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-03_687_2062247448126517612/-mr-10000 POSTHOOK: query: SELECT dest1.* FROM dest1 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest1 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-33_389_7760677817749293223/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-03_687_2062247448126517612/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 468 val_469 @@ -522,42 +517,37 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 3) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 3) - type: boolean - Select Operator - expressions: - expr: key 
+ Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket1.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket1.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -567,12 +557,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -584,12 +574,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -598,9 +588,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-04-19_19-54-33_827_389668384025414462/-ext-10001 + directory: file:/tmp/amarsri/hive_2011-07-11_02-58-03_815_3440650522759974343/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-04-19_19-54-33_827_389668384025414462/-ext-10001/ + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-58-03_815_3440650522759974343/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -621,12 +611,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-34_030_551214422770351063/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-03_854_6271750517263563878/-mr-10000 
POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 4 OUT OF 4 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-34_030_551214422770351063/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-03_854_6271750517263563878/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 3 val_4 @@ -892,42 +882,37 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -937,12 +922,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -954,12 +939,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -968,9 +953,9 
@@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-04-19_19-54-37_207_200034802818981990/-ext-10001 + directory: file:/tmp/amarsri/hive_2011-07-11_02-58-06_669_1410317577849689409/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-04-19_19-54-37_207_200034802818981990/-ext-10001/ + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-58-06_669_1410317577849689409/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -991,12 +976,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-37_421_4651239266027596073/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-06_708_7756573301727009933/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-37_421_4651239266027596073/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-06_708_7756573301727009933/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 0 val_0 @@ -1516,40 +1501,35 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 3) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 3) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat @@ -1561,12 +1541,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -1578,12 +1558,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -1592,9 +1572,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-04-19_19-54-40_626_8344413354265807082/-ext-10001 + directory: file:/tmp/amarsri/hive_2011-07-11_02-58-09_360_6989967716610636074/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-04-19_19-54-40_626_8344413354265807082/-ext-10001/ + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-58-09_360_6989967716610636074/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1615,12 +1595,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-40_714_2969909884270153149/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-09_405_8746850347504403733/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 3 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-40_714_2969909884270153149/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-09_405_8746850347504403733/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 0 val_0 @@ -1983,40 +1963,35 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 3) = 1) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 3) = 1) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition 
base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat @@ -2028,12 +2003,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2045,12 +2020,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -2059,9 +2034,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-04-19_19-54-43_892_7813003909078968435/-ext-10001 + directory: file:/tmp/amarsri/hive_2011-07-11_02-58-12_091_1241249343366878880/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-04-19_19-54-43_892_7813003909078968435/-ext-10001/ + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-58-12_091_1241249343366878880/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2082,12 +2057,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-43_980_1056408795977903895/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-12_130_7467195593406386569/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket TABLESAMPLE (BUCKET 2 OUT OF 3 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-43_980_1056408795977903895/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-12_130_7467195593406386569/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 1 val_2 @@ -2436,43 +2411,37 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + 
expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt [s] - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket2 [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket20.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket2 Partition - base file name: srcbucket20.txt + base file name: srcbucket2 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -2482,12 +2451,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket2 name default.srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260544 + transient_lastDdlTime 1310371848 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2499,61 +2468,23 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket2 name default.srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260544 + transient_lastDdlTime 1310371848 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 name: default.srcbucket2 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket22.txt - Partition - base file name: srcbucket22.txt - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count 4 - bucket_field_name key - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 - name default.srcbucket2 - serialization.ddl struct srcbucket2 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260544 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - input format: org.apache.hadoop.mapred.TextInputFormat - output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count 4 - bucket_field_name key - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 - name default.srcbucket2 - serialization.ddl struct srcbucket2 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260544 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.srcbucket2 - name: default.srcbucket2 Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-04-19_19-54-47_115_1778465985574852574/-ext-10001 + directory: file:/tmp/amarsri/hive_2011-07-11_02-58-14_778_4777537004727741553/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-04-19_19-54-47_115_1778465985574852574/-ext-10001/ + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-58-14_778_4777537004727741553/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2574,12 +2505,12 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket2 -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-47_633_8309063970411271815/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-14_817_7365496467903983867/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket2 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-47_633_8309063970411271815/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-14_817_7365496467903983867/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 0 val_0 @@ -2588,26 +2519,62 @@ 2 val_2 4 val_4 8 val_8 +10 val_10 +12 val_12 +12 val_12 +18 val_18 +18 val_18 20 val_20 24 val_24 24 val_24 26 val_26 26 val_26 28 val_28 +30 val_30 +34 val_34 42 val_42 42 val_42 44 val_44 +54 val_54 +58 val_58 +58 val_58 64 val_64 66 val_66 +70 val_70 +70 val_70 +70 val_70 +72 val_72 +72 val_72 +74 val_74 +76 val_76 +76 val_76 +78 val_78 80 val_80 82 val_82 84 val_84 84 val_84 86 val_86 +90 val_90 +90 val_90 +90 val_90 +92 val_92 +96 val_96 +98 val_98 +98 val_98 +100 val_100 +100 val_100 +104 val_104 +104 val_104 114 val_114 116 val_116 118 val_118 118 val_118 +120 val_120 +120 val_120 +126 val_126 +128 val_128 +128 val_128 +128 val_128 134 val_134 134 val_134 136 val_136 @@ -2615,11 +2582,19 @@ 138 val_138 138 val_138 138 val_138 +146 val_146 +146 val_146 150 val_150 152 val_152 152 val_152 156 val_156 158 val_158 +160 val_160 +162 val_162 +164 val_164 +164 val_164 +166 val_166 +168 val_168 170 val_170 172 val_172 172 val_172 @@ -2628,6 +2603,8 @@ 176 val_176 176 val_176 178 val_178 +180 val_180 +186 val_186 190 val_190 192 val_192 194 val_194 @@ -2638,18 +2615,38 @@ 208 val_208 208 val_208 208 val_208 +214 val_214 +216 val_216 +216 val_216 +218 val_218 222 val_222 224 val_224 224 val_224 226 val_226 228 val_228 +230 val_230 +230 val_230 +230 val_230 +230 val_230 +230 val_230 
+238 val_238 +238 val_238 242 val_242 242 val_242 244 val_244 248 val_248 +252 val_252 +256 val_256 +256 val_256 +258 val_258 260 val_260 262 val_262 266 val_266 +272 val_272 +272 val_272 +274 val_274 +278 val_278 +278 val_278 280 val_280 280 val_280 282 val_282 @@ -2658,6 +2655,14 @@ 286 val_286 288 val_288 288 val_288 +292 val_292 +296 val_296 +298 val_298 +298 val_298 +298 val_298 +302 val_302 +306 val_306 +308 val_308 310 val_310 316 val_316 316 val_316 @@ -2665,12 +2670,34 @@ 318 val_318 318 val_318 318 val_318 +322 val_322 +322 val_322 332 val_332 336 val_336 338 val_338 +342 val_342 +342 val_342 +344 val_344 +344 val_344 +348 val_348 +348 val_348 +348 val_348 +348 val_348 +348 val_348 356 val_356 +360 val_360 +362 val_362 +364 val_364 +366 val_366 +368 val_368 374 val_374 378 val_378 +382 val_382 +382 val_382 +384 val_384 +384 val_384 +384 val_384 +386 val_386 392 val_392 394 val_394 396 val_396 @@ -2684,11 +2711,28 @@ 406 val_406 406 val_406 406 val_406 +414 val_414 +414 val_414 +418 val_418 424 val_424 424 val_424 +430 val_430 +430 val_430 +430 val_430 +432 val_432 +436 val_436 +438 val_438 +438 val_438 +438 val_438 444 val_444 446 val_446 448 val_448 +452 val_452 +454 val_454 +454 val_454 +454 val_454 +458 val_458 +458 val_458 460 val_460 462 val_462 462 val_462 @@ -2699,11 +2743,23 @@ 468 val_468 468 val_468 468 val_468 +470 val_470 +472 val_472 +478 val_478 +478 val_478 480 val_480 480 val_480 480 val_480 482 val_482 484 val_484 +490 val_490 +492 val_492 +492 val_492 +494 val_494 +496 val_496 +498 val_498 +498 val_498 +498 val_498 PREHOOK: query: EXPLAIN EXTENDED SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s ORDER BY key, value PREHOOK: type: QUERY @@ -2728,42 +2784,37 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 1) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 4) = 1) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt [s] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket2 [s] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2/srcbucket21.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket2 Partition - base file name: srcbucket21.txt + base file name: srcbucket2 input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -2773,12 +2824,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 + location 
pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket2 name default.srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260544 + transient_lastDdlTime 1310371848 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -2790,12 +2841,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket2 + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket2 name default.srcbucket2 serialization.ddl struct srcbucket2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260544 + transient_lastDdlTime 1310371848 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket2 name: default.srcbucket2 @@ -2804,9 +2855,9 @@ File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-04-19_19-54-51_203_3981912443934260367/-ext-10001 + directory: file:/tmp/amarsri/hive_2011-07-11_02-58-17_484_3834914689711003679/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-04-19_19-54-51_203_3981912443934260367/-ext-10001/ + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-58-17_484_3834914689711003679/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2827,34 +2878,115 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket2 -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-51_536_2222098349658422634/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-17_524_6116746546442362470/-mr-10000 POSTHOOK: query: SELECT s.* FROM srcbucket2 TABLESAMPLE (BUCKET 2 OUT OF 4 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket2 -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-51_536_2222098349658422634/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-17_524_6116746546442362470/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] 5 val_5 5 val_5 5 val_5 9 val_9 +17 val_17 +33 val_33 +37 val_37 +37 val_37 41 val_41 +53 val_53 +57 val_57 +65 val_65 +69 val_69 +77 val_77 85 val_85 +97 val_97 +97 val_97 +105 val_105 +113 val_113 +113 val_113 +125 val_125 +125 val_125 +129 val_129 +129 val_129 133 val_133 137 val_137 137 val_137 +145 val_145 +149 val_149 +149 val_149 +153 val_153 +157 val_157 +165 val_165 +165 val_165 +169 val_169 +169 val_169 +169 val_169 +169 val_169 177 val_177 +181 val_181 +189 val_189 +193 val_193 +193 val_193 +193 val_193 +197 val_197 +197 val_197 +201 val_201 +205 val_205 +205 val_205 +209 val_209 +209 val_209 +213 val_213 +213 val_213 +217 val_217 +217 val_217 221 val_221 221 val_221 229 val_229 229 val_229 +233 val_233 +233 val_233 +237 val_237 +237 val_237 +241 val_241 +249 val_249 +257 val_257 265 val_265 265 val_265 +273 val_273 +273 val_273 +273 val_273 +277 val_277 +277 val_277 +277 val_277 +277 
val_277 +281 val_281 +281 val_281 +285 val_285 +289 val_289 +305 val_305 +309 val_309 +309 val_309 317 val_317 317 val_317 +321 val_321 +321 val_321 +325 val_325 +325 val_325 +333 val_333 +333 val_333 +341 val_341 +345 val_345 353 val_353 353 val_353 +365 val_365 +369 val_369 +369 val_369 +369 val_369 +373 val_373 +377 val_377 +389 val_389 393 val_393 397 val_397 397 val_397 @@ -2866,13 +2998,32 @@ 409 val_409 409 val_409 409 val_409 +413 val_413 +413 val_413 +417 val_417 +417 val_417 +417 val_417 +421 val_421 +429 val_429 +429 val_429 +437 val_437 449 val_449 +453 val_453 +457 val_457 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +469 val_469 +477 val_477 481 val_481 485 val_485 489 val_489 489 val_489 489 val_489 489 val_489 +493 val_493 +497 val_497 PREHOOK: query: CREATE TABLE empty_bucket (key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE empty_bucket (key int, value string) CLUSTERED BY (key) INTO 2 BUCKETS STORED AS TEXTFILE @@ -2904,44 +3055,80 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean - Select Operator - expressions: - expr: key + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value + expr: _col1 type: string - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - expr: _col1 - type: string - sort order: ++ - tag: -1 - value expressions: - expr: _col0 - type: int - expr: _col1 - type: string + sort order: ++ + tag: -1 + value expressions: + expr: _col0 + type: int + expr: _col1 + type: string Needs Tagging: false + Path -> Alias: + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/empty_bucket [s] + Path -> Partition: + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/empty_bucket + Partition + base file name: empty_bucket + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count 2 + bucket_field_name key + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/empty_bucket + name default.empty_bucket + serialization.ddl struct empty_bucket { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310378300 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count 2 + bucket_field_name key + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/empty_bucket + name default.empty_bucket + serialization.ddl struct empty_bucket { i32 key, string value} + serialization.format 1 + 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310378300 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.empty_bucket + name: default.empty_bucket Reduce Operator Tree: Extract File Output Operator compressed: false GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-04-19_19-54-54_970_837635628196553945/-ext-10001 + directory: file:/tmp/amarsri/hive_2011-07-11_02-58-20_219_7398883575090456577/-ext-10001 NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-04-19_19-54-54_970_837635628196553945/-ext-10001/ + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-58-20_219_7398883575090456577/-ext-10001/ table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -2962,11 +3149,11 @@ ORDER BY key, value PREHOOK: type: QUERY PREHOOK: Input: default@empty_bucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-55_069_4633328979298722063/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-20_257_2520214403406970744/-mr-10000 POSTHOOK: query: SELECT s.* FROM empty_bucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) s ORDER BY key, value POSTHOOK: type: QUERY POSTHOOK: Input: default@empty_bucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-54-55_069_4633328979298722063/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-20_257_2520214403406970744/-mr-10000 POSTHOOK: Lineage: dest1.key SIMPLE [(srcbucket)s.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: dest1.value SIMPLE [(srcbucket)s.FieldSchema(name:value, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/sample7.q.out =================================================================== --- ql/src/test/results/clientpositive/sample7.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample7.q.out (working copy) @@ -37,51 +37,46 @@ Filter Operator isSamplingPred: false predicate: - expr: ((((hash(key) & 2147483647) % 4) = 0) and (key > 100)) + expr: (key > 100) type: boolean Filter Operator isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key - type: int - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-58_518_7055483572049586597/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-19_19-54-58_518_7055483572049586597/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value - columns.types int:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 - name default.dest1 - serialization.ddl struct dest1 { i32 key, string value} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303268098 - serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-23_160_5772419563904958236/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_02-58-23_160_5772419563904958236/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value + columns.types int:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 + name default.dest1 + serialization.ddl struct dest1 { i32 key, string value} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310378303 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s] Index: ql/src/test/results/clientpositive/sample8.q.out =================================================================== --- ql/src/test/results/clientpositive/sample8.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample8.q.out (working copy) @@ -33,49 +33,39 @@ alias: s GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 1) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 1) = 0) - type: boolean - Reduce Output Operator - sort order: - tag: 0 - value expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string + Reduce Output Operator + sort order: + tag: 0 + value expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string t TableScan alias: t GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 2147483647) % 10) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 10) = 0) - type: boolean - Reduce Output Operator - sort order: - tag: 1 - value expressions: - expr: key - type: string - expr: value - type: string + Reduce Output Operator + sort order: + tag: 1 + value expressions: + expr: key + type: string + expr: value + type: string Needs Tagging: true Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [t, s] Index: ql/src/test/results/clientpositive/sample9.q.out =================================================================== --- ql/src/test/results/clientpositive/sample9.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/sample9.q.out (working copy) @@ -22,52 +22,47 @@ alias: a GatherStats: false Filter Operator - isSamplingPred: false + isSamplingPred: true predicate: expr: (((hash(key) & 
2147483647) % 2) = 0) type: boolean - Filter Operator - isSamplingPred: true - predicate: - expr: (((hash(key) & 2147483647) % 2) = 0) - type: boolean + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: _col0 type: int - expr: value + expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-04-19_19-55-15_093_8705959538820118982/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-04-19_19-55-15_093_8705959538820118982/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types int:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/tmp/amarsri/hive_2011-07-11_02-58-35_476_3346956740908751236/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-07-11_02-58-35_476_3346956740908751236/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types int:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Needs Tagging: false Path -> Alias: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt [s:a] + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket [s:a] Path -> Partition: - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket Partition - base file name: srcbucket0.txt + base file name: srcbucket input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: @@ -77,12 +72,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat @@ -94,12 +89,12 @@ columns.types int:string file.inputformat org.apache.hadoop.mapred.TextInputFormat file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket name default.srcbucket serialization.ddl struct srcbucket { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1303260542 + transient_lastDdlTime 1310371847 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket name: default.srcbucket @@ -113,12 +108,12 @@ FROM (SELECT a.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) a) s PREHOOK: type: QUERY PREHOOK: Input: default@srcbucket -PREHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-55-15_304_876052844698471163/-mr-10000 +PREHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-35_521_5870581689660228083/-mr-10000 POSTHOOK: query: SELECT s.* FROM (SELECT a.* FROM srcbucket TABLESAMPLE (BUCKET 1 OUT OF 2 on key) a) s POSTHOOK: type: QUERY POSTHOOK: Input: default@srcbucket -POSTHOOK: Output: file:/tmp/sdong/hive_2011-04-19_19-55-15_304_876052844698471163/-mr-10000 +POSTHOOK: Output: file:/tmp/amarsri/hive_2011-07-11_02-58-35_521_5870581689660228083/-mr-10000 474 val_475 62 val_63 468 val_469 Index: ql/src/test/results/clientpositive/semijoin.q.out =================================================================== --- ql/src/test/results/clientpositive/semijoin.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/semijoin.q.out (working copy) @@ -517,33 +517,29 @@ predicate: expr: (key < '15') type: boolean - Filter Operator - predicate: - expr: (key < '15') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: int + outputColumnNames: key + Group By Operator + bucketGroup: false + keys: expr: key type: int - outputColumnNames: key - Group By Operator - bucketGroup: false - keys: - expr: key + expr: key + type: int + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col1 type: int - expr: key + sort order: + + Map-reduce partition columns: + expr: _col1 type: int - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col1 - type: int - sort order: + - Map-reduce partition columns: - expr: _col1 - type: int - tag: 1 + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -653,35 +649,31 @@ predicate: expr: (value < 'val_10') type: boolean - Filter Operator - predicate: - expr: (value < 'val_10') - type: boolean - Select Operator - expressions: + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: key, value + Group By Operator + bucketGroup: false + keys: expr: key type: int expr: value type: string - outputColumnNames: key, value - Group By Operator - bucketGroup: false - keys: - expr: key + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: + expr: _col0 type: int - expr: value - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -787,36 +779,32 @@ predicate: expr: (key > 5) type: boolean - Filter Operator - predicate: - expr: (key > 5) - type: boolean + Select Operator + expressions: + expr: key + type: int + outputColumnNames: _col0 Select Operator expressions: - expr: key + expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col0 type: int + mode: hash outputColumnNames: _col0 - Group By Operator - bucketGroup: false - 
keys: + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -916,46 +904,38 @@ predicate: expr: ((key > 5) and (value <= 'val_20')) type: boolean - Filter Operator - predicate: - expr: (key > 5) - type: boolean + Select Operator + expressions: + expr: key + type: int + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: _col0 type: int - expr: value + expr: _col1 type: string outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col1 <= 'val_20') - type: boolean - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: + expr: _col0 + type: int + expr: _col1 + type: string + mode: hash + outputColumnNames: _col0, _col1 + Reduce Output Operator + key expressions: expr: _col0 type: int - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - Group By Operator - bucketGroup: false - keys: - expr: _col0 - type: int - expr: _col1 - type: string - mode: hash - outputColumnNames: _col0, _col1 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: @@ -1054,36 +1034,32 @@ predicate: expr: (key > 2) type: boolean - Filter Operator - predicate: - expr: (key > 2) - type: boolean + Select Operator + expressions: + expr: key + type: int + outputColumnNames: _col0 Select Operator expressions: - expr: key + expr: _col0 type: int outputColumnNames: _col0 - Select Operator - expressions: + Group By Operator + bucketGroup: false + keys: expr: _col0 type: int + mode: hash outputColumnNames: _col0 - Group By Operator - bucketGroup: false - keys: + Reduce Output Operator + key expressions: expr: _col0 type: int - mode: hash - outputColumnNames: _col0 - Reduce Output Operator - key expressions: - expr: _col0 - type: int - sort order: + - Map-reduce partition columns: - expr: _col0 - type: int - tag: 1 + sort order: + + Map-reduce partition columns: + expr: _col0 + type: int + tag: 1 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/set_processor_namespaces.q.out =================================================================== --- ql/src/test/results/clientpositive/set_processor_namespaces.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/set_processor_namespaces.q.out (working copy) @@ -25,23 +25,19 @@ predicate: expr: (key = 5) type: boolean - Filter Operator - predicate: - expr: (key = 5) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/skewjoin.q.out =================================================================== --- ql/src/test/results/clientpositive/skewjoin.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/skewjoin.q.out (working copy) @@ -1198,22 +1198,18 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 100) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 0 - value expressions: - expr: _col0 - type: string + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 0 + value expressions: + expr: _col0 + type: string src2:src TableScan alias: src @@ -1248,19 +1244,15 @@ expr: key type: string outputColumnNames: _col0 - Filter Operator - predicate: - expr: (_col0 < 80) - type: boolean - Reduce Output Operator - key expressions: - expr: _col0 - type: string - sort order: + - Map-reduce partition columns: - expr: _col0 - type: string - tag: 2 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + sort order: + + Map-reduce partition columns: + expr: _col0 + type: string + tag: 2 Reduce Operator Tree: Join Operator condition map: Index: ql/src/test/results/clientpositive/smb_mapjoin9.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin9.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/smb_mapjoin9.q.out (working copy) @@ -88,27 +88,23 @@ expr: _col7 type: string outputColumnNames: _col0, _col2, _col5, _col6, _col7 - Filter Operator - predicate: - expr: (((_col2 = '2010-10-15') and (_col7 = '2010-10-15')) and _col5 is not null) - type: boolean - Select Operator - expressions: - expr: _col5 - type: int - expr: _col6 - type: string - expr: _col7 - type: string - expr: _col0 - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col5 + type: int + expr: _col6 + type: string + expr: _col7 + type: string + expr: _col0 + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/smb_mapjoin_6.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/smb_mapjoin_6.q.out (working copy) @@ -2634,29 +2634,25 @@ expr: _col5 type: string outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: (_col0 > 1000) - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat 
- output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col4 + type: int + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-0 Move Operator @@ -2790,29 +2786,25 @@ expr: _col5 type: string outputColumnNames: _col0, _col1, _col4, _col5 - Filter Operator - predicate: - expr: (_col0 > 1000) - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.smb_join_results + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col4 + type: int + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.smb_join_results Stage: Stage-0 Move Operator @@ -2958,31 +2950,27 @@ expr: _col9 type: string outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 - Filter Operator - predicate: - expr: (_col0 > 1000) - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col4 - type: int - expr: _col5 - type: string - expr: _col8 - type: int - expr: _col9 - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col4 + type: int + expr: _col5 + type: string + expr: _col8 + type: int + expr: _col9 + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/split_sample.q.out =================================================================== --- ql/src/test/results/clientpositive/split_sample.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/split_sample.q.out (working copy) @@ -4111,17 +4111,17 @@ 1 {VALUE._col0} handleSkewJoin: false outputColumnNames: _col0, _col4 - Select Operator - expressions: - expr: _col0 - type: int - expr: _col4 - type: int - outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: ((_col1) IN (199, 10199, 
20199) or (_col0) IN (199, 10199, 20199)) - type: boolean + Filter Operator + predicate: + expr: ((_col4) IN (199, 10199, 20199) or (_col0) IN (199, 10199, 20199)) + type: boolean + Select Operator + expressions: + expr: _col0 + type: int + expr: _col4 + type: int + outputColumnNames: _col0, _col1 Select Operator expressions: expr: _col0 Index: ql/src/test/results/clientpositive/stats11.q.out =================================================================== --- ql/src/test/results/clientpositive/stats11.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/stats11.q.out (working copy) @@ -162,46 +162,41 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_20-41-18_163_6560883492277571896/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_20-41-18_163_6560883492277571896/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - transient_lastDdlTime 1306986078 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_03-10-59_585_1038860316679520823/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_03-10-59_585_1038860316679520823/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + transient_lastDdlTime 1310379059 + serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false @@ -593,51 +588,46 @@ expr: _col6 type: string outputColumnNames: _col0, _col1, _col5, _col6 - Filter Operator - isSamplingPred: false - predicate: - expr: (_col6 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: int - expr: _col1 - type: string - expr: _col5 - type: string - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_20-41-49_304_6102645690321346948/-ext-10002 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_20-41-49_304_6102645690321346948/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns key,value1,value2 - columns.types string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result - name default.bucketmapjoin_tmp_result - numFiles 1 - numPartitions 0 - numRows 464 - rawDataSize 8519 - serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8983 - transient_lastDdlTime 1306986098 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.bucketmapjoin_tmp_result - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false + Select Operator + expressions: + expr: _col0 + type: int + expr: _col1 + type: string + expr: _col5 + type: string + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_03-11-25_827_1887525539038984243/-ext-10002 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_03-11-25_827_1887525539038984243/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns key,value1,value2 + columns.types string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/bucketmapjoin_tmp_result + name default.bucketmapjoin_tmp_result + numFiles 1 + numPartitions 0 + numRows 464 + rawDataSize 8519 + serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 8983 + transient_lastDdlTime 1310379076 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.bucketmapjoin_tmp_result + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false Local Work: Map Reduce Local Work Needs Tagging: false Index: ql/src/test/results/clientpositive/stats2.q.out 
=================================================================== --- ql/src/test/results/clientpositive/stats2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/stats2.q.out (working copy) @@ -23,29 +23,25 @@ srcpart TableScan alias: srcpart - Filter Operator - predicate: - expr: ds is not null - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.analyze_t1 + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.analyze_t1 Stage: Stage-0 Move Operator Index: ql/src/test/results/clientpositive/subq.q.out =================================================================== --- ql/src/test/results/clientpositive/subq.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/subq.q.out (working copy) @@ -31,30 +31,26 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 Select Operator expressions: - expr: key + expr: _col0 type: string - expr: value + expr: _col1 type: string outputColumnNames: _col0, _col1 - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-4 Conditional Operator Index: ql/src/test/results/clientpositive/subq2.q.out =================================================================== --- ql/src/test/results/clientpositive/subq2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/subq2.q.out (working copy) @@ -69,23 +69,19 @@ expr: _col1 type: bigint outputColumnNames: _col0, _col1 - Filter Operator - predicate: - expr: (_col0 >= 90) - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: bigint - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: bigint + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy) @@ -50,22 +50,27 @@ columns.types string,string,string field.delim 9 serialization.format 9 - Reduce Output Operator - key expressions: - expr: _col1 - type: string - sort order: + - Map-reduce partition columns: - expr: _col1 - type: string - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string + Filter Operator + isSamplingPred: false + predicate: + expr: ((_col1 < 100) and (_col0 = '2008-04-08')) + type: boolean + Reduce Output Operator + key expressions: + expr: _col1 + type: string + sort order: + + Map-reduce partition columns: + expr: _col1 + type: string + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [tmap:src] @@ -239,34 +244,29 @@ name: default.srcpart Reduce Operator Tree: Extract - Filter Operator - isSamplingPred: false - predicate: - expr: ((_col1 < 100) and (_col0 = '2008-04-08')) - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-24-54_583_8987862490121410836/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-24-54_583_8987862490121410836/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/tmp/amarsri/hive_2011-03-19_01-46-48_559_8291501891794719709/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-03-19_01-46-48_559_8291501891794719709/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/transform_ppr2.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/transform_ppr2.q.out (working copy) @@ -33,30 +33,30 @@ TableScan alias: src GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (ds = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: ds - type: string - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1, _col2 - Transform Operator - command: /bin/cat - 
output info: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1,_col2 - columns.types string,string,string - field.delim 9 - serialization.format 9 + Select Operator + expressions: + expr: ds + type: string + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1, _col2 + Transform Operator + command: /bin/cat + output info: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1,_col2 + columns.types string,string,string + field.delim 9 + serialization.format 9 + Filter Operator + isSamplingPred: false + predicate: + expr: (_col1 < 100) + type: boolean Reduce Output Operator key expressions: expr: _col1 @@ -162,34 +162,29 @@ name: default.srcpart Reduce Operator Tree: Extract - Filter Operator - isSamplingPred: false - predicate: - expr: (_col1 < 100) - type: boolean - Select Operator - expressions: - expr: _col1 - type: string - expr: _col2 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - directory: file:/tmp/sdong/hive_2011-02-10_17-24-59_315_262991000601799738/-ext-10001 - NumFilesPerFileSink: 1 - Stats Publishing Key Prefix: file:/tmp/sdong/hive_2011-02-10_17-24-59_315_262991000601799738/-ext-10001/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - columns _col0,_col1 - columns.types string:string - serialization.format 1 - TotalFiles: 1 - GatherStats: false - MultiFileSpray: false + Select Operator + expressions: + expr: _col1 + type: string + expr: _col2 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + directory: file:/tmp/amarsri/hive_2011-03-19_01-46-51_820_4418676794321891468/-ext-10001 + NumFilesPerFileSink: 1 + Stats Publishing Key Prefix: file:/tmp/amarsri/hive_2011-03-19_01-46-51_820_4418676794321891468/-ext-10001/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types string:string + serialization.format 1 + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf1.q.out =================================================================== --- ql/src/test/results/clientpositive/udf1.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf1.q.out (working copy) @@ -51,61 +51,57 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: ('a' like '%a%') - type: boolean - expr: ('b' like '%a%') - type: boolean - expr: ('ab' like '%a%') - type: boolean - expr: ('ab' like '%a_') - type: boolean - expr: ('%_' like '\%\_') - type: boolean - expr: ('ab' like '\%\_') - type: boolean - expr: ('ab' like '_a%') - type: boolean - expr: ('ab' like 'a') - type: boolean - expr: ('' rlike '.*') - type: boolean - expr: ('a' rlike '[ab]') - type: boolean - expr: ('' rlike '[ab]') - type: boolean - expr: ('hadoop' rlike '[a-z]*') - type: boolean - expr: ('hadoop' rlike 'o*') - type: boolean - expr: regexp_replace('abc', 'b', 'c') - type: string - expr: regexp_replace('abc', 'z', 'a') - type: 
string - expr: regexp_replace('abbbb', 'bb', 'b') - type: string - expr: regexp_replace('hadoop', '(.)[a-z]*', '$1ive') - type: string - expr: regexp_replace('hadoopAAA', 'A.*', '') - type: string - expr: regexp_replace('abc', '', 'A') - type: string - expr: ('abc' rlike '') - type: boolean - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: ('a' like '%a%') + type: boolean + expr: ('b' like '%a%') + type: boolean + expr: ('ab' like '%a%') + type: boolean + expr: ('ab' like '%a_') + type: boolean + expr: ('%_' like '\%\_') + type: boolean + expr: ('ab' like '\%\_') + type: boolean + expr: ('ab' like '_a%') + type: boolean + expr: ('ab' like 'a') + type: boolean + expr: ('' rlike '.*') + type: boolean + expr: ('a' rlike '[ab]') + type: boolean + expr: ('' rlike '[ab]') + type: boolean + expr: ('hadoop' rlike '[a-z]*') + type: boolean + expr: ('hadoop' rlike 'o*') + type: boolean + expr: regexp_replace('abc', 'b', 'c') + type: string + expr: regexp_replace('abc', 'z', 'a') + type: string + expr: regexp_replace('abbbb', 'bb', 'b') + type: string + expr: regexp_replace('hadoop', '(.)[a-z]*', '$1ive') + type: string + expr: regexp_replace('hadoopAAA', 'A.*', '') + type: string + expr: regexp_replace('abc', '', 'A') + type: string + expr: ('abc' rlike '') + type: boolean + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/udf9.q.out =================================================================== --- ql/src/test/results/clientpositive/udf9.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf9.q.out (working copy) @@ -38,51 +38,47 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: datediff('2008-12-31', '2009-01-01') - type: int - expr: datediff('2008-03-01', '2008-02-28') - type: int - expr: datediff('2007-03-01', '2007-01-28') - type: int - expr: datediff('2008-03-01 23:59:59', '2008-03-02 00:00:00') - type: int - expr: date_add('2008-12-31', 1) - type: string - expr: date_add('2008-12-31', 365) - type: string - expr: date_add('2008-02-28', 2) - type: string - expr: date_add('2009-02-28', 2) - type: string - expr: date_add('2007-02-28', 365) - type: string - expr: date_add('2007-02-28 23:59:59', 730) - type: string - expr: date_sub('2009-01-01', 1) - type: string - expr: date_sub('2009-01-01', 365) - type: string - expr: date_sub('2008-02-28', 2) - type: string - expr: date_sub('2009-02-28', 2) - type: string - expr: date_sub('2007-02-28', 365) - type: string - expr: date_sub('2007-02-28 01:12:34', 730) - type: string - outputColumnNames: _col0, _col1, _col2, _col3, 
_col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: datediff('2008-12-31', '2009-01-01') + type: int + expr: datediff('2008-03-01', '2008-02-28') + type: int + expr: datediff('2007-03-01', '2007-01-28') + type: int + expr: datediff('2008-03-01 23:59:59', '2008-03-02 00:00:00') + type: int + expr: date_add('2008-12-31', 1) + type: string + expr: date_add('2008-12-31', 365) + type: string + expr: date_add('2008-02-28', 2) + type: string + expr: date_add('2009-02-28', 2) + type: string + expr: date_add('2007-02-28', 365) + type: string + expr: date_add('2007-02-28 23:59:59', 730) + type: string + expr: date_sub('2009-01-01', 1) + type: string + expr: date_sub('2009-01-01', 365) + type: string + expr: date_sub('2008-02-28', 2) + type: string + expr: date_sub('2009-02-28', 2) + type: string + expr: date_sub('2007-02-28', 365) + type: string + expr: date_sub('2007-02-28 01:12:34', 730) + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_10_trims.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_10_trims.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_10_trims.q.out (working copy) @@ -37,23 +37,19 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: trim(trim(trim(trim(trim(trim(trim(trim(trim(trim(' abc ')))))))))) - type: string - outputColumnNames: _col0 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dest1 + Select Operator + expressions: + expr: trim(trim(trim(trim(trim(trim(trim(trim(trim(trim(' abc ')))))))))) + type: string + outputColumnNames: _col0 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dest1 Stage: Stage-5 Conditional Operator Index: ql/src/test/results/clientpositive/udf_hour.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_hour.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_hour.q.out (working copy) @@ -40,25 +40,21 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: hour('2009-08-07 13:14:15') - type: int - expr: hour('13:14:15') - type: int - expr: hour('2009-08-07') - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input 
format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: hour('2009-08-07 13:14:15') + type: int + expr: hour('13:14:15') + type: int + expr: hour('2009-08-07') + type: int + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_isnull_isnotnull.q.out (working copy) @@ -50,26 +50,22 @@ predicate: expr: true is not null type: boolean - Filter Operator - predicate: - expr: true is not null - type: boolean - Select Operator - expressions: - expr: null is null - type: boolean - expr: 1 is not null - type: boolean - expr: 'my string' is not null - type: boolean - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: null is null + type: boolean + expr: 1 is not null + type: boolean + expr: 'my string' is not null + type: boolean + outputColumnNames: _col0, _col1, _col2 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -127,26 +123,22 @@ predicate: expr: (lint is not null and (not mstringstring is null)) type: boolean - Filter Operator - predicate: - expr: (lint is not null and (not mstringstring is null)) - type: boolean - Select Operator - expressions: - expr: lint is not null - type: boolean - expr: lintstring is not null - type: boolean - expr: mstringstring is not null - type: boolean - outputColumnNames: _col0, _col1, _col2 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: lint is not null + type: boolean + expr: lintstring is not null + type: boolean + expr: mstringstring is not null + type: boolean + outputColumnNames: _col0, _col1, _col2 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_like.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_like.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_like.q.out (working copy) @@ -41,43 +41,39 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: ('_%_' like '%\_\%\_%') - type: boolean - expr: ('__' like '%\_\%\_%') - type: boolean - expr: ('%%_%_' like '%\_\%\_%') - type: boolean - expr: ('%_%_%' like '%\%\_\%') - type: 
boolean - expr: ('_%_' like '\%\_%') - type: boolean - expr: ('%__' like '__\%%') - type: boolean - expr: ('_%' like '\_\%\_\%%') - type: boolean - expr: ('_%' like '\_\%_%') - type: boolean - expr: ('%_' like '\%\_') - type: boolean - expr: ('ab' like '\%\_') - type: boolean - expr: ('ab' like '_a%') - type: boolean - expr: ('ab' like 'a') - type: boolean - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: ('_%_' like '%\_\%\_%') + type: boolean + expr: ('__' like '%\_\%\_%') + type: boolean + expr: ('%%_%_' like '%\_\%\_%') + type: boolean + expr: ('%_%_%' like '%\%\_\%') + type: boolean + expr: ('_%_' like '\%\_%') + type: boolean + expr: ('%__' like '__\%%') + type: boolean + expr: ('_%' like '\_\%\_\%%') + type: boolean + expr: ('_%' like '\_\%_%') + type: boolean + expr: ('%_' like '\%\_') + type: boolean + expr: ('ab' like '\%\_') + type: boolean + expr: ('ab' like '_a%') + type: boolean + expr: ('ab' like 'a') + type: boolean + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_lower.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_lower.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_lower.q.out (working copy) @@ -36,23 +36,19 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: lower('AbC 123') - type: string - expr: upper('AbC 123') - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: lower('AbC 123') + type: string + expr: upper('AbC 123') + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_minute.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_minute.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_minute.q.out (working copy) @@ -40,25 +40,21 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: minute('2009-08-07 13:14:15') - type: int - expr: minute('13:14:15') - type: int - expr: minute('2009-08-07') - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + 
expressions: + expr: minute('2009-08-07 13:14:15') + type: int + expr: minute('13:14:15') + type: int + expr: minute('2009-08-07') + type: int + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_notequal.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_notequal.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_notequal.q.out (working copy) @@ -48,23 +48,19 @@ predicate: expr: (key <> '302') type: boolean - Filter Operator - predicate: - expr: (key <> '302') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator @@ -610,23 +606,19 @@ predicate: expr: (key <> '302') type: boolean - Filter Operator - predicate: - expr: (key <> '302') - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_parse_url.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_parse_url.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_parse_url.q.out (working copy) @@ -63,41 +63,37 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PATH') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'REF') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k2') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k3') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'FILE') - type: string - expr: 
parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PROTOCOL') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'USERINFO') - type: string - expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'AUTHORITY') - type: string - outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PATH') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'REF') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k2') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k1') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'QUERY', 'k3') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'FILE') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'PROTOCOL') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'USERINFO') + type: string + expr: parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'AUTHORITY') + type: string + outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_second.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_second.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_second.q.out (working copy) @@ -40,25 +40,21 @@ predicate: expr: (key = 86) type: boolean - Filter Operator - predicate: - expr: (key = 86) - type: boolean - Select Operator - expressions: - expr: second('2009-08-07 13:14:15') - type: int - expr: second('13:14:15') - type: int - expr: second('2009-08-07') - type: int - outputColumnNames: _col0, _col1, _col2 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: second('2009-08-07 13:14:15') + type: int + expr: second('13:14:15') + type: int + expr: second('2009-08-07') + type: int + outputColumnNames: _col0, _col1, _col2 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/udf_size.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_size.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/udf_size.q.out (working copy) @@ -44,28 +44,24 @@ predicate: 
expr: (lint is not null and (not mstringstring is null)) type: boolean - Filter Operator - predicate: - expr: (lint is not null and (not mstringstring is null)) - type: boolean - Select Operator - expressions: - expr: size(lint) - type: int - expr: size(lintstring) - type: int - expr: size(mstringstring) - type: int - expr: size(null) - type: int - outputColumnNames: _col0, _col1, _col2, _col3 - Limit - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: size(lint) + type: int + expr: size(lintstring) + type: int + expr: size(mstringstring) + type: int + expr: size(null) + type: int + outputColumnNames: _col0, _col1, _col2, _col3 + Limit + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-0 Fetch Operator Index: ql/src/test/results/clientpositive/union.q.out =================================================================== --- ql/src/test/results/clientpositive/union.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/union.q.out (working copy) @@ -39,31 +39,27 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat null-subquery2:unioninput-subquery2:src TableScan alias: src @@ -71,31 +67,27 @@ predicate: expr: (key > 100) type: boolean - Filter Operator - predicate: - expr: (key > 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 1 - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 1 + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Stage: Stage-4 Conditional Operator Index: 
ql/src/test/results/clientpositive/union20.q.out =================================================================== --- ql/src/test/results/clientpositive/union20.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/union20.q.out (working copy) @@ -164,23 +164,19 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-4 Map Reduce @@ -192,23 +188,19 @@ predicate: expr: (key < 10) type: boolean - Filter Operator - predicate: - expr: (key < 10) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - outputColumnNames: _col0, _col1 - File Output Operator - compressed: false - GlobalTableId: 0 - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + outputColumnNames: _col0, _col1 + File Output Operator + compressed: false + GlobalTableId: 0 + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat Stage: Stage-5 Map Reduce Index: ql/src/test/results/clientpositive/union22.q.out =================================================================== --- ql/src/test/results/clientpositive/union22.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/union22.q.out (working copy) @@ -105,32 +105,27 @@ predicate: expr: (k0 > 50) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '1') and (k0 > 50)) - type: boolean - Select Operator - expressions: - expr: k1 - type: string - expr: k3 - type: string - expr: k4 - type: string - outputColumnNames: _col1, _col3, _col4 - HashTable Sink Operator - condition expressions: - 0 {k1} {k2} - 1 {_col3} {_col4} - filter predicates: - 0 {(ds = '1')} - 1 - handleSkewJoin: false - keys: - 0 [Column[k1]] - 1 [Column[_col1]] - Position of Big Table: 0 + Select Operator + expressions: + expr: k1 + type: string + expr: k3 + type: string + expr: k4 + type: string + outputColumnNames: _col1, _col3, _col4 + HashTable Sink Operator + condition expressions: + 0 {k1} {k2} + 1 {_col3} {_col4} + filter predicates: + 0 {(ds = '1')} + 1 + handleSkewJoin: false + keys: + 0 [Column[k1]] + 1 [Column[_col1]] + Position of Big Table: 0 Stage: Stage-1 Map Reduce @@ -246,20 +241,80 @@ expr: _col11 type: string outputColumnNames: _col0, _col1, _col10, _col11 + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col10 + type: string + expr: _col11 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + 
type: string + outputColumnNames: _col0, _col1, _col2, _col3 + File Output Operator + compressed: false + GlobalTableId: 1 + directory: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_03-36-05_942_4742399607666706050/-ext-10000 + NumFilesPerFileSink: 1 + Static Partition Specification: ds=2/ + Stats Publishing Key Prefix: pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_03-36-05_942_4742399607666706050/-ext-10000/ + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + bucket_count -1 + columns k1,k2,k3,k4 + columns.types string:string:string:string + file.inputformat org.apache.hadoop.mapred.TextInputFormat + file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + location pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dst_union22 + name default.dst_union22 + numFiles 1 + numPartitions 1 + numRows 500 + partition_columns ds + rawDataSize 11124 + serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 11624 + transient_lastDdlTime 1310380562 + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + name: default.dst_union22 + TotalFiles: 1 + GatherStats: true + MultiFileSpray: false + null-subquery1:subq-subquery1:dst_union22_delta + TableScan + alias: dst_union22_delta + GatherStats: false Filter Operator isSamplingPred: false predicate: - expr: (_col0 > 20) + expr: (k0 <= 50) type: boolean Select Operator expressions: - expr: _col0 + expr: k1 type: string - expr: _col1 + expr: k2 type: string - expr: _col10 + expr: k3 type: string - expr: _col11 + expr: k4 type: string outputColumnNames: _col0, _col1, _col2, _col3 Union @@ -307,76 +362,6 @@ TotalFiles: 1 GatherStats: true MultiFileSpray: false - null-subquery1:subq-subquery1:dst_union22_delta - TableScan - alias: dst_union22_delta - GatherStats: false - Filter Operator - isSamplingPred: false - predicate: - expr: (k0 <= 50) - type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: ((ds = '1') and (k0 <= 50)) - type: boolean - Select Operator - expressions: - expr: k1 - type: string - expr: k2 - type: string - expr: k3 - type: string - expr: k4 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - File Output Operator - compressed: false - GlobalTableId: 1 - directory: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_21-10-51_082_6975609978266062150/-ext-10000 - NumFilesPerFileSink: 1 - Static Partition Specification: ds=2/ - Stats Publishing Key Prefix: pfile:/data/users/tomasz/apache-hive/build/ql/scratchdir/hive_2011-06-01_21-10-51_082_6975609978266062150/-ext-10000/ - table: - input format: org.apache.hadoop.mapred.TextInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - properties: - bucket_count -1 - columns k1,k2,k3,k4 - columns.types string:string:string:string - file.inputformat org.apache.hadoop.mapred.TextInputFormat - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - location pfile:/data/users/tomasz/apache-hive/build/ql/test/data/warehouse/dst_union22 - name default.dst_union22 - 
numFiles 1 - numPartitions 1 - numRows 500 - partition_columns ds - rawDataSize 11124 - serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} - serialization.format 1 - serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 - transient_lastDdlTime 1306987846 - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - name: default.dst_union22 - TotalFiles: 1 - GatherStats: true - MultiFileSpray: false Needs Tagging: false Path -> Alias: file:/tmp/tomasz/hive_2011-06-01_21-10-51_082_6975609978266062150/-mr-10002 [file:/tmp/tomasz/hive_2011-06-01_21-10-51_082_6975609978266062150/-mr-10002] Index: ql/src/test/results/clientpositive/union_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/union_ppr.q.out (revision 1145463) +++ ql/src/test/results/clientpositive/union_ppr.q.out (working copy) @@ -36,60 +36,50 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Filter Operator - isSamplingPred: false - predicate: - expr: (_col2 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: ++++ - tag: -1 - value expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + sort order: ++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string null-subquery2:a-subquery2:y TableScan alias: y @@ -99,60 +89,50 @@ predicate: expr: (key < 100) type: boolean - Filter Operator - isSamplingPred: false - predicate: - expr: (key < 100) - type: boolean - Select Operator - expressions: - expr: key - type: string - expr: value - type: string - expr: ds - type: string - expr: hr - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Union - Filter Operator - isSamplingPred: false - predicate: - expr: (_col2 = '2008-04-08') - type: boolean - Select Operator - expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - outputColumnNames: _col0, _col1, _col2, _col3 - Reduce Output Operator - key expressions: - expr: _col0 - type: string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string - sort order: ++++ - tag: -1 - value expressions: - expr: _col0 - type: 
string - expr: _col1 - type: string - expr: _col2 - type: string - expr: _col3 - type: string + Select Operator + expressions: + expr: key + type: string + expr: value + type: string + expr: ds + type: string + expr: hr + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Union + Select Operator + expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + outputColumnNames: _col0, _col1, _col2, _col3 + Reduce Output Operator + key expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string + sort order: ++++ + tag: -1 + value expressions: + expr: _col0 + type: string + expr: _col1 + type: string + expr: _col2 + type: string + expr: _col3 + type: string Needs Tagging: false Path -> Alias: pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 [null-subquery1:a-subquery1:x, null-subquery2:a-subquery2:y] Index: ql/src/test/results/compiler/plan/case_sensitivity.q.xml =================================================================== --- ql/src/test/results/compiler/plan/case_sensitivity.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/case_sensitivity.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304059959 + 1310382261 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304059959 + 1310382261 @@ -382,13 +382,13 @@ true - 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1304059959 + 1310382261 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1304059959 + 1310382261 @@ -622,327 +622,176 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-52-40_122_3999959333490330122/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_3_NUM_INPUT_ROWS - - - CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_3_TIME_TAKEN - - - CNTR_NAME_FS_3_FATAL_ERROR - - - - - FS_3 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - - - - - - - lintstring - - - src_thrift - - - - - - - - - myint - - - mystring - - - underscore_int - - - - - - - - - int - - - - - - - - - - - - - - - - - - - - - - - - 0 - - - - - - - - - - - - - - - MYSTRING - - - false - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10002 - - _col0 - - - - - - - lint - - - src_thrift - - - - - - - - - - - - - - - - - 1 - - - - - - - - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-22_117_318942524743087866/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_2_NUM_INPUT_ROWS + CNTR_NAME_FS_3_NUM_INPUT_ROWS - CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS + CNTR_NAME_FS_3_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_2_TIME_TAKEN + CNTR_NAME_FS_3_TIME_TAKEN - CNTR_NAME_SEL_2_FATAL_ERROR + CNTR_NAME_FS_3_FATAL_ERROR - SEL_2 + FS_3 - + - - + + + + + + + + + + _col1 + + + + - - - _col0 + + + lintstring - - + + src_thrift + + + + + + + + myint + + + mystring + + + underscore_int + + + + + + + + + int + + + + + + + + + + + + + + + - - - _col1 + + + - - + + 0 + + + + + + + + MYSTRING + + + false + + + + - - - - - - + + _col0 + - - - - - - - 
lint - - - src_thrift - - - - - - - - - - - - - 0 - - - - + + + lint - - + + src_thrift - + + + + + @@ -952,44 +801,64 @@ - 0 + 1 - + - - - boolean - - + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_1_NUM_INPUT_ROWS + CNTR_NAME_SEL_2_NUM_INPUT_ROWS - CNTR_NAME_FIL_1_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_2_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_1_TIME_TAKEN + CNTR_NAME_SEL_2_TIME_TAKEN - CNTR_NAME_FIL_1_FATAL_ERROR + CNTR_NAME_SEL_2_FATAL_ERROR - FIL_1 + SEL_2 @@ -1003,28 +872,22 @@ - + - lint + _col0 - - src_thrift - - + - + - lintstring + _col1 - - src_thrift - - + @@ -1095,7 +958,11 @@ - + + + boolean + + @@ -1158,7 +1025,17 @@ - + + + lint + + + src_thrift + + + + + @@ -1178,7 +1055,17 @@ - + + + lintstring + + + src_thrift + + + + + @@ -1303,7 +1190,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift src_thrift @@ -1315,7 +1202,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift src_thrift @@ -1376,11 +1263,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1304059959 + 1310382261 @@ -1442,11 +1329,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1304059959 + 1310382261 Index: ql/src/test/results/compiler/plan/cast1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/cast1.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -62,11 +62,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304059967 + 1310382265 @@ -124,11 +124,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304059967 + 1310382265 @@ -152,592 +152,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-04-28_23-52-49_698_4831640136709839662/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-04-28_23-52-49_698_4831640136709839662/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3,_col4,_col5,_col6 - - - serialization.format - 1 - - - columns.types - int:double:double:double:int:boolean:int - - - - - - - 1 - - - - - - - CNTR_NAME_FS_17_NUM_INPUT_ROWS - - - CNTR_NAME_FS_17_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_17_TIME_TAKEN - - - CNTR_NAME_FS_17_FATAL_ERROR - - - - - FS_17 - - - - - - - - - - - - - - - - _col0 - - - - - - - - int - - - - - - - - - _col1 - - - - - - - - double - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - - - - _col4 - - - - - - - - - - - - - _col5 - - - - - - - - boolean - - - - - - - - - _col6 - - - - - - - - - - - - - - + + + + + file:/tmp/amarsri/hive_2011-07-11_04-04-26_769_5192710661051968856/-ext-10001 - - - - - - _col6 - - - - - - - - - - true - - - 
- - - - - - org.apache.hadoop.hive.ql.udf.UDFToInteger - - - UDFToInteger - - - - - - - + + 1 - - _col5 - - - - - - - - - - 1 - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFToBoolean - - - UDFToBoolean - - - - - - - + + file:/tmp/amarsri/hive_2011-07-11_04-04-26_769_5192710661051968856/-ext-10001/ - - _col4 - - - - - - - - - - 3 - - - - - - - - - - - - - - 2.0 - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFToInteger - - - UDFToInteger - - - - - - - - - + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - + + org.apache.hadoop.mapred.TextInputFormat - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - _col3 - - - - - - - - - - 3.0 - - + + + + columns + _col0,_col1,_col2,_col3,_col4,_col5,_col6 - - - - - - - 2.0 - - + + serialization.format + 1 - - - - - - true + + columns.types + int:double:double:double:int:boolean:int - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - - - - - _col2 - - - - - - - - - - 3 - - - - - - - - - - 2.0 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - - - - - - + + 1 - - _col1 - - - - - - - - - - 3.0 - - - - - - - - - - 2 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - - - - - - - - - _col0 - - - - - - - - - - 3 - - - - - - - - - - 2 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPPlus - - - + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - _col4 - - - _col5 - - - _col6 - - - - - - CNTR_NAME_SEL_16_NUM_INPUT_ROWS + CNTR_NAME_FS_16_NUM_INPUT_ROWS - CNTR_NAME_SEL_16_NUM_OUTPUT_ROWS + CNTR_NAME_FS_16_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_16_TIME_TAKEN + CNTR_NAME_FS_16_TIME_TAKEN - CNTR_NAME_SEL_16_FATAL_ERROR + CNTR_NAME_FS_16_FATAL_ERROR - SEL_16 + FS_16 - + @@ -747,38 +234,46 @@ - - _c0 - _col0 + + + - + + + int + + - - _c1 - _col1 + + + - + + + double + + - - _c2 - _col2 + + + @@ -786,12 +281,12 @@ - - _c3 - _col3 + + + @@ -799,12 +294,12 @@ - - _c4 - _col4 + + + @@ -812,25 +307,29 @@ - - _c5 - _col5 + + + - + + + boolean + + - - _c6 - _col6 + + + @@ -844,69 +343,392 @@ - - - - + + + + _col6 + - - - key + + + - - src + + true + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFToInteger + + + UDFToInteger + + + + + + + + + + _col5 + + + + + - - - string + + + + 1 + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFToBoolean + + + UDFToBoolean + + + + + + + + + + _col4 + + + + + + + + + + 3 + + + + + + + + + + + + + + 2.0 + + + + + + org.apache.hadoop.hive.ql.udf.UDFToInteger + + + UDFToInteger + + + + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + + + + + + + + + _col3 + + + + + + + 3.0 + + + + + + + + + + 2.0 + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + + + + + + + + + _col2 + + + + + + - 86 + 3 + + + + + + + 2.0 + + + - + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + - + + + _col1 + + + + + + + + + + 3.0 + + + + + + + + + + 2 + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + + + + + + + + + _col0 + + + + + + + + + + 3 + + + + + + + + + + 2 + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFOPPlus + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + _col3 + + + _col4 + + + _col5 + + + _col6 + + + + + - CNTR_NAME_FIL_15_NUM_INPUT_ROWS + CNTR_NAME_SEL_15_NUM_INPUT_ROWS - CNTR_NAME_FIL_15_NUM_OUTPUT_ROWS + 
CNTR_NAME_SEL_15_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_15_TIME_TAKEN + CNTR_NAME_SEL_15_TIME_TAKEN - CNTR_NAME_FIL_15_FATAL_ERROR + CNTR_NAME_SEL_15_FATAL_ERROR - FIL_15 + SEL_15 @@ -920,18 +742,96 @@ - + + + _c0 + - key + _col0 - - src + + + + + + + + _c1 + + + _col1 + - + + + + + _c2 + + + _col2 + + + + + + + + + + _c3 + + + _col3 + + + + + + + + + + _c4 + + + _col4 + + + + + + + + + + _c5 + + + _col5 + + + + + + + + + + _c6 + + + _col6 + + + + + + @@ -955,7 +855,11 @@ src - + + + string + + @@ -984,21 +888,21 @@ - CNTR_NAME_FIL_18_NUM_INPUT_ROWS + CNTR_NAME_FIL_17_NUM_INPUT_ROWS - CNTR_NAME_FIL_18_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_17_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_18_TIME_TAKEN + CNTR_NAME_FIL_17_TIME_TAKEN - CNTR_NAME_FIL_18_FATAL_ERROR + CNTR_NAME_FIL_17_FATAL_ERROR - FIL_18 + FIL_17 @@ -1012,7 +916,17 @@ - + + + key + + + src + + + + + @@ -1084,16 +998,16 @@ - CNTR_NAME_TS_14_NUM_INPUT_ROWS + CNTR_NAME_TS_13_NUM_INPUT_ROWS - CNTR_NAME_TS_14_NUM_OUTPUT_ROWS + CNTR_NAME_TS_13_NUM_OUTPUT_ROWS - CNTR_NAME_TS_14_TIME_TAKEN + CNTR_NAME_TS_13_TIME_TAKEN - CNTR_NAME_TS_14_FATAL_ERROR + CNTR_NAME_TS_13_FATAL_ERROR @@ -1105,7 +1019,7 @@ - TS_14 + TS_13 @@ -1124,7 +1038,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -1136,7 +1050,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -1193,11 +1107,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304059967 + 1310382265 @@ -1255,11 +1169,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304059967 + 1310382265 Index: ql/src/test/results/compiler/plan/groupby1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby1.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/groupby1.q.xml (working copy) @@ -435,21 +435,21 @@ - CNTR_NAME_RS_27_NUM_INPUT_ROWS + CNTR_NAME_RS_25_NUM_INPUT_ROWS - CNTR_NAME_RS_27_NUM_OUTPUT_ROWS + CNTR_NAME_RS_25_NUM_OUTPUT_ROWS - CNTR_NAME_RS_27_TIME_TAKEN + CNTR_NAME_RS_25_TIME_TAKEN - CNTR_NAME_RS_27_FATAL_ERROR + CNTR_NAME_RS_25_FATAL_ERROR - RS_27 + RS_25 @@ -614,21 +614,21 @@ - CNTR_NAME_GBY_26_NUM_INPUT_ROWS + CNTR_NAME_GBY_24_NUM_INPUT_ROWS - CNTR_NAME_GBY_26_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_24_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_26_TIME_TAKEN + CNTR_NAME_GBY_24_TIME_TAKEN - CNTR_NAME_GBY_26_FATAL_ERROR + CNTR_NAME_GBY_24_FATAL_ERROR - GBY_26 + GBY_24 @@ -725,21 +725,21 @@ - CNTR_NAME_SEL_25_NUM_INPUT_ROWS + CNTR_NAME_SEL_23_NUM_INPUT_ROWS - CNTR_NAME_SEL_25_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_23_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_25_TIME_TAKEN + CNTR_NAME_SEL_23_TIME_TAKEN - CNTR_NAME_SEL_25_FATAL_ERROR + CNTR_NAME_SEL_23_FATAL_ERROR - SEL_25 + SEL_23 @@ -799,16 +799,16 @@ - CNTR_NAME_TS_24_NUM_INPUT_ROWS + CNTR_NAME_TS_22_NUM_INPUT_ROWS - CNTR_NAME_TS_24_NUM_OUTPUT_ROWS + CNTR_NAME_TS_22_NUM_OUTPUT_ROWS - CNTR_NAME_TS_24_TIME_TAKEN + CNTR_NAME_TS_22_TIME_TAKEN - CNTR_NAME_TS_24_FATAL_ERROR + CNTR_NAME_TS_22_FATAL_ERROR @@ -823,7 +823,7 @@ - TS_24 + TS_22 @@ -1080,21 +1080,21 @@ - CNTR_NAME_FS_30_NUM_INPUT_ROWS + CNTR_NAME_FS_28_NUM_INPUT_ROWS - 
CNTR_NAME_FS_30_NUM_OUTPUT_ROWS + CNTR_NAME_FS_28_NUM_OUTPUT_ROWS - CNTR_NAME_FS_30_TIME_TAKEN + CNTR_NAME_FS_28_TIME_TAKEN - CNTR_NAME_FS_30_FATAL_ERROR + CNTR_NAME_FS_28_FATAL_ERROR - FS_30 + FS_28 @@ -1200,21 +1200,21 @@ - CNTR_NAME_SEL_29_NUM_INPUT_ROWS + CNTR_NAME_SEL_27_NUM_INPUT_ROWS - CNTR_NAME_SEL_29_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_27_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_29_TIME_TAKEN + CNTR_NAME_SEL_27_TIME_TAKEN - CNTR_NAME_SEL_29_FATAL_ERROR + CNTR_NAME_SEL_27_FATAL_ERROR - SEL_29 + SEL_27 @@ -1338,21 +1338,21 @@ - CNTR_NAME_GBY_28_NUM_INPUT_ROWS + CNTR_NAME_GBY_26_NUM_INPUT_ROWS - CNTR_NAME_GBY_28_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_26_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_28_TIME_TAKEN + CNTR_NAME_GBY_26_TIME_TAKEN - CNTR_NAME_GBY_28_FATAL_ERROR + CNTR_NAME_GBY_26_FATAL_ERROR - GBY_28 + GBY_26 Index: ql/src/test/results/compiler/plan/groupby2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby2.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/groupby2.q.xml (working copy) @@ -349,21 +349,21 @@ - CNTR_NAME_RS_41_NUM_INPUT_ROWS + CNTR_NAME_RS_39_NUM_INPUT_ROWS - CNTR_NAME_RS_41_NUM_OUTPUT_ROWS + CNTR_NAME_RS_39_NUM_OUTPUT_ROWS - CNTR_NAME_RS_41_TIME_TAKEN + CNTR_NAME_RS_39_TIME_TAKEN - CNTR_NAME_RS_41_FATAL_ERROR + CNTR_NAME_RS_39_FATAL_ERROR - RS_41 + RS_39 @@ -707,21 +707,21 @@ - CNTR_NAME_GBY_40_NUM_INPUT_ROWS + CNTR_NAME_GBY_38_NUM_INPUT_ROWS - CNTR_NAME_GBY_40_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_38_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_40_TIME_TAKEN + CNTR_NAME_GBY_38_TIME_TAKEN - CNTR_NAME_GBY_40_FATAL_ERROR + CNTR_NAME_GBY_38_FATAL_ERROR - GBY_40 + GBY_38 @@ -844,21 +844,21 @@ - CNTR_NAME_SEL_39_NUM_INPUT_ROWS + CNTR_NAME_SEL_37_NUM_INPUT_ROWS - CNTR_NAME_SEL_39_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_37_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_39_TIME_TAKEN + CNTR_NAME_SEL_37_TIME_TAKEN - CNTR_NAME_SEL_39_FATAL_ERROR + CNTR_NAME_SEL_37_FATAL_ERROR - SEL_39 + SEL_37 @@ -918,16 +918,16 @@ - CNTR_NAME_TS_38_NUM_INPUT_ROWS + CNTR_NAME_TS_36_NUM_INPUT_ROWS - CNTR_NAME_TS_38_NUM_OUTPUT_ROWS + CNTR_NAME_TS_36_NUM_OUTPUT_ROWS - CNTR_NAME_TS_38_TIME_TAKEN + CNTR_NAME_TS_36_TIME_TAKEN - CNTR_NAME_TS_38_FATAL_ERROR + CNTR_NAME_TS_36_FATAL_ERROR @@ -942,7 +942,7 @@ - TS_38 + TS_36 @@ -1212,21 +1212,21 @@ - CNTR_NAME_FS_44_NUM_INPUT_ROWS + CNTR_NAME_FS_42_NUM_INPUT_ROWS - CNTR_NAME_FS_44_NUM_OUTPUT_ROWS + CNTR_NAME_FS_42_NUM_OUTPUT_ROWS - CNTR_NAME_FS_44_TIME_TAKEN + CNTR_NAME_FS_42_TIME_TAKEN - CNTR_NAME_FS_44_FATAL_ERROR + CNTR_NAME_FS_42_FATAL_ERROR - FS_44 + FS_42 @@ -1399,21 +1399,21 @@ - CNTR_NAME_SEL_43_NUM_INPUT_ROWS + CNTR_NAME_SEL_41_NUM_INPUT_ROWS - CNTR_NAME_SEL_43_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_41_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_43_TIME_TAKEN + CNTR_NAME_SEL_41_TIME_TAKEN - CNTR_NAME_SEL_43_FATAL_ERROR + CNTR_NAME_SEL_41_FATAL_ERROR - SEL_43 + SEL_41 @@ -1591,21 +1591,21 @@ - CNTR_NAME_GBY_42_NUM_INPUT_ROWS + CNTR_NAME_GBY_40_NUM_INPUT_ROWS - CNTR_NAME_GBY_42_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_40_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_42_TIME_TAKEN + CNTR_NAME_GBY_40_TIME_TAKEN - CNTR_NAME_GBY_42_FATAL_ERROR + CNTR_NAME_GBY_40_FATAL_ERROR - GBY_42 + GBY_40 Index: ql/src/test/results/compiler/plan/groupby3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby3.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/groupby3.q.xml (working copy) @@ -390,21 +390,21 @@ - CNTR_NAME_RS_55_NUM_INPUT_ROWS + CNTR_NAME_RS_53_NUM_INPUT_ROWS - CNTR_NAME_RS_55_NUM_OUTPUT_ROWS + 
CNTR_NAME_RS_53_NUM_OUTPUT_ROWS - CNTR_NAME_RS_55_TIME_TAKEN + CNTR_NAME_RS_53_TIME_TAKEN - CNTR_NAME_RS_55_FATAL_ERROR + CNTR_NAME_RS_53_FATAL_ERROR - RS_55 + RS_53 @@ -899,21 +899,21 @@ - CNTR_NAME_GBY_54_NUM_INPUT_ROWS + CNTR_NAME_GBY_52_NUM_INPUT_ROWS - CNTR_NAME_GBY_54_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_52_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_54_TIME_TAKEN + CNTR_NAME_GBY_52_TIME_TAKEN - CNTR_NAME_GBY_54_FATAL_ERROR + CNTR_NAME_GBY_52_FATAL_ERROR - GBY_54 + GBY_52 @@ -1046,21 +1046,21 @@ - CNTR_NAME_SEL_53_NUM_INPUT_ROWS + CNTR_NAME_SEL_51_NUM_INPUT_ROWS - CNTR_NAME_SEL_53_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_51_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_53_TIME_TAKEN + CNTR_NAME_SEL_51_TIME_TAKEN - CNTR_NAME_SEL_53_FATAL_ERROR + CNTR_NAME_SEL_51_FATAL_ERROR - SEL_53 + SEL_51 @@ -1107,16 +1107,16 @@ - CNTR_NAME_TS_52_NUM_INPUT_ROWS + CNTR_NAME_TS_50_NUM_INPUT_ROWS - CNTR_NAME_TS_52_NUM_OUTPUT_ROWS + CNTR_NAME_TS_50_NUM_OUTPUT_ROWS - CNTR_NAME_TS_52_TIME_TAKEN + CNTR_NAME_TS_50_TIME_TAKEN - CNTR_NAME_TS_52_FATAL_ERROR + CNTR_NAME_TS_50_FATAL_ERROR @@ -1128,7 +1128,7 @@ - TS_52 + TS_50 @@ -1408,21 +1408,21 @@ - CNTR_NAME_FS_58_NUM_INPUT_ROWS + CNTR_NAME_FS_56_NUM_INPUT_ROWS - CNTR_NAME_FS_58_NUM_OUTPUT_ROWS + CNTR_NAME_FS_56_NUM_OUTPUT_ROWS - CNTR_NAME_FS_58_TIME_TAKEN + CNTR_NAME_FS_56_TIME_TAKEN - CNTR_NAME_FS_58_FATAL_ERROR + CNTR_NAME_FS_56_FATAL_ERROR - FS_58 + FS_56 @@ -1627,21 +1627,21 @@ - CNTR_NAME_SEL_57_NUM_INPUT_ROWS + CNTR_NAME_SEL_55_NUM_INPUT_ROWS - CNTR_NAME_SEL_57_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_55_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_57_TIME_TAKEN + CNTR_NAME_SEL_55_TIME_TAKEN - CNTR_NAME_SEL_57_FATAL_ERROR + CNTR_NAME_SEL_55_FATAL_ERROR - SEL_57 + SEL_55 @@ -1916,21 +1916,21 @@ - CNTR_NAME_GBY_56_NUM_INPUT_ROWS + CNTR_NAME_GBY_54_NUM_INPUT_ROWS - CNTR_NAME_GBY_56_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_54_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_56_TIME_TAKEN + CNTR_NAME_GBY_54_TIME_TAKEN - CNTR_NAME_GBY_56_FATAL_ERROR + CNTR_NAME_GBY_54_FATAL_ERROR - GBY_56 + GBY_54 Index: ql/src/test/results/compiler/plan/groupby4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby4.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/groupby4.q.xml (working copy) @@ -279,21 +279,21 @@ - CNTR_NAME_RS_69_NUM_INPUT_ROWS + CNTR_NAME_RS_67_NUM_INPUT_ROWS - CNTR_NAME_RS_69_NUM_OUTPUT_ROWS + CNTR_NAME_RS_67_NUM_OUTPUT_ROWS - CNTR_NAME_RS_69_TIME_TAKEN + CNTR_NAME_RS_67_TIME_TAKEN - CNTR_NAME_RS_69_FATAL_ERROR + CNTR_NAME_RS_67_FATAL_ERROR - RS_69 + RS_67 @@ -422,21 +422,21 @@ - CNTR_NAME_GBY_68_NUM_INPUT_ROWS + CNTR_NAME_GBY_66_NUM_INPUT_ROWS - CNTR_NAME_GBY_68_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_66_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_68_TIME_TAKEN + CNTR_NAME_GBY_66_TIME_TAKEN - CNTR_NAME_GBY_68_FATAL_ERROR + CNTR_NAME_GBY_66_FATAL_ERROR - GBY_68 + GBY_66 @@ -504,21 +504,21 @@ - CNTR_NAME_SEL_67_NUM_INPUT_ROWS + CNTR_NAME_SEL_65_NUM_INPUT_ROWS - CNTR_NAME_SEL_67_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_65_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_67_TIME_TAKEN + CNTR_NAME_SEL_65_TIME_TAKEN - CNTR_NAME_SEL_67_FATAL_ERROR + CNTR_NAME_SEL_65_FATAL_ERROR - SEL_67 + SEL_65 @@ -565,16 +565,16 @@ - CNTR_NAME_TS_66_NUM_INPUT_ROWS + CNTR_NAME_TS_64_NUM_INPUT_ROWS - CNTR_NAME_TS_66_NUM_OUTPUT_ROWS + CNTR_NAME_TS_64_NUM_OUTPUT_ROWS - CNTR_NAME_TS_66_TIME_TAKEN + CNTR_NAME_TS_64_TIME_TAKEN - CNTR_NAME_TS_66_FATAL_ERROR + CNTR_NAME_TS_64_FATAL_ERROR @@ -586,7 +586,7 @@ - TS_66 + TS_64 @@ -870,21 +870,21 @@ - CNTR_NAME_FS_72_NUM_INPUT_ROWS + CNTR_NAME_FS_70_NUM_INPUT_ROWS - 
CNTR_NAME_FS_72_NUM_OUTPUT_ROWS + CNTR_NAME_FS_70_NUM_OUTPUT_ROWS - CNTR_NAME_FS_72_TIME_TAKEN + CNTR_NAME_FS_70_TIME_TAKEN - CNTR_NAME_FS_72_FATAL_ERROR + CNTR_NAME_FS_70_FATAL_ERROR - FS_72 + FS_70 @@ -957,21 +957,21 @@ - CNTR_NAME_SEL_71_NUM_INPUT_ROWS + CNTR_NAME_SEL_69_NUM_INPUT_ROWS - CNTR_NAME_SEL_71_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_69_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_71_TIME_TAKEN + CNTR_NAME_SEL_69_TIME_TAKEN - CNTR_NAME_SEL_71_FATAL_ERROR + CNTR_NAME_SEL_69_FATAL_ERROR - SEL_71 + SEL_69 @@ -1055,21 +1055,21 @@ - CNTR_NAME_GBY_70_NUM_INPUT_ROWS + CNTR_NAME_GBY_68_NUM_INPUT_ROWS - CNTR_NAME_GBY_70_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_68_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_70_TIME_TAKEN + CNTR_NAME_GBY_68_TIME_TAKEN - CNTR_NAME_GBY_70_FATAL_ERROR + CNTR_NAME_GBY_68_FATAL_ERROR - GBY_70 + GBY_68 Index: ql/src/test/results/compiler/plan/groupby5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby5.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/groupby5.q.xml (working copy) @@ -301,21 +301,21 @@ - CNTR_NAME_RS_83_NUM_INPUT_ROWS + CNTR_NAME_RS_81_NUM_INPUT_ROWS - CNTR_NAME_RS_83_NUM_OUTPUT_ROWS + CNTR_NAME_RS_81_NUM_OUTPUT_ROWS - CNTR_NAME_RS_83_TIME_TAKEN + CNTR_NAME_RS_81_TIME_TAKEN - CNTR_NAME_RS_83_FATAL_ERROR + CNTR_NAME_RS_81_FATAL_ERROR - RS_83 + RS_81 @@ -480,21 +480,21 @@ - CNTR_NAME_GBY_82_NUM_INPUT_ROWS + CNTR_NAME_GBY_80_NUM_INPUT_ROWS - CNTR_NAME_GBY_82_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_80_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_82_TIME_TAKEN + CNTR_NAME_GBY_80_TIME_TAKEN - CNTR_NAME_GBY_82_FATAL_ERROR + CNTR_NAME_GBY_80_FATAL_ERROR - GBY_82 + GBY_80 @@ -591,21 +591,21 @@ - CNTR_NAME_SEL_81_NUM_INPUT_ROWS + CNTR_NAME_SEL_79_NUM_INPUT_ROWS - CNTR_NAME_SEL_81_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_79_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_81_TIME_TAKEN + CNTR_NAME_SEL_79_TIME_TAKEN - CNTR_NAME_SEL_81_FATAL_ERROR + CNTR_NAME_SEL_79_FATAL_ERROR - SEL_81 + SEL_79 @@ -665,16 +665,16 @@ - CNTR_NAME_TS_80_NUM_INPUT_ROWS + CNTR_NAME_TS_78_NUM_INPUT_ROWS - CNTR_NAME_TS_80_NUM_OUTPUT_ROWS + CNTR_NAME_TS_78_NUM_OUTPUT_ROWS - CNTR_NAME_TS_80_TIME_TAKEN + CNTR_NAME_TS_78_TIME_TAKEN - CNTR_NAME_TS_80_FATAL_ERROR + CNTR_NAME_TS_78_FATAL_ERROR @@ -689,7 +689,7 @@ - TS_80 + TS_78 @@ -963,21 +963,21 @@ - CNTR_NAME_FS_86_NUM_INPUT_ROWS + CNTR_NAME_FS_84_NUM_INPUT_ROWS - CNTR_NAME_FS_86_NUM_OUTPUT_ROWS + CNTR_NAME_FS_84_NUM_OUTPUT_ROWS - CNTR_NAME_FS_86_TIME_TAKEN + CNTR_NAME_FS_84_TIME_TAKEN - CNTR_NAME_FS_86_FATAL_ERROR + CNTR_NAME_FS_84_FATAL_ERROR - FS_86 + FS_84 @@ -1083,21 +1083,21 @@ - CNTR_NAME_SEL_85_NUM_INPUT_ROWS + CNTR_NAME_SEL_83_NUM_INPUT_ROWS - CNTR_NAME_SEL_85_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_83_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_85_TIME_TAKEN + CNTR_NAME_SEL_83_TIME_TAKEN - CNTR_NAME_SEL_85_FATAL_ERROR + CNTR_NAME_SEL_83_FATAL_ERROR - SEL_85 + SEL_83 @@ -1227,21 +1227,21 @@ - CNTR_NAME_GBY_84_NUM_INPUT_ROWS + CNTR_NAME_GBY_82_NUM_INPUT_ROWS - CNTR_NAME_GBY_84_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_82_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_84_TIME_TAKEN + CNTR_NAME_GBY_82_TIME_TAKEN - CNTR_NAME_GBY_84_FATAL_ERROR + CNTR_NAME_GBY_82_FATAL_ERROR - GBY_84 + GBY_82 Index: ql/src/test/results/compiler/plan/groupby6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby6.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/groupby6.q.xml (working copy) @@ -279,21 +279,21 @@ - CNTR_NAME_RS_97_NUM_INPUT_ROWS + CNTR_NAME_RS_95_NUM_INPUT_ROWS - CNTR_NAME_RS_97_NUM_OUTPUT_ROWS + 
CNTR_NAME_RS_95_NUM_OUTPUT_ROWS - CNTR_NAME_RS_97_TIME_TAKEN + CNTR_NAME_RS_95_TIME_TAKEN - CNTR_NAME_RS_97_FATAL_ERROR + CNTR_NAME_RS_95_FATAL_ERROR - RS_97 + RS_95 @@ -422,21 +422,21 @@ - CNTR_NAME_GBY_96_NUM_INPUT_ROWS + CNTR_NAME_GBY_94_NUM_INPUT_ROWS - CNTR_NAME_GBY_96_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_94_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_96_TIME_TAKEN + CNTR_NAME_GBY_94_TIME_TAKEN - CNTR_NAME_GBY_96_FATAL_ERROR + CNTR_NAME_GBY_94_FATAL_ERROR - GBY_96 + GBY_94 @@ -504,21 +504,21 @@ - CNTR_NAME_SEL_95_NUM_INPUT_ROWS + CNTR_NAME_SEL_93_NUM_INPUT_ROWS - CNTR_NAME_SEL_95_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_93_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_95_TIME_TAKEN + CNTR_NAME_SEL_93_TIME_TAKEN - CNTR_NAME_SEL_95_FATAL_ERROR + CNTR_NAME_SEL_93_FATAL_ERROR - SEL_95 + SEL_93 @@ -565,16 +565,16 @@ - CNTR_NAME_TS_94_NUM_INPUT_ROWS + CNTR_NAME_TS_92_NUM_INPUT_ROWS - CNTR_NAME_TS_94_NUM_OUTPUT_ROWS + CNTR_NAME_TS_92_NUM_OUTPUT_ROWS - CNTR_NAME_TS_94_TIME_TAKEN + CNTR_NAME_TS_92_TIME_TAKEN - CNTR_NAME_TS_94_FATAL_ERROR + CNTR_NAME_TS_92_FATAL_ERROR @@ -586,7 +586,7 @@ - TS_94 + TS_92 @@ -870,21 +870,21 @@ - CNTR_NAME_FS_100_NUM_INPUT_ROWS + CNTR_NAME_FS_98_NUM_INPUT_ROWS - CNTR_NAME_FS_100_NUM_OUTPUT_ROWS + CNTR_NAME_FS_98_NUM_OUTPUT_ROWS - CNTR_NAME_FS_100_TIME_TAKEN + CNTR_NAME_FS_98_TIME_TAKEN - CNTR_NAME_FS_100_FATAL_ERROR + CNTR_NAME_FS_98_FATAL_ERROR - FS_100 + FS_98 @@ -957,21 +957,21 @@ - CNTR_NAME_SEL_99_NUM_INPUT_ROWS + CNTR_NAME_SEL_97_NUM_INPUT_ROWS - CNTR_NAME_SEL_99_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_97_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_99_TIME_TAKEN + CNTR_NAME_SEL_97_TIME_TAKEN - CNTR_NAME_SEL_99_FATAL_ERROR + CNTR_NAME_SEL_97_FATAL_ERROR - SEL_99 + SEL_97 @@ -1055,21 +1055,21 @@ - CNTR_NAME_GBY_98_NUM_INPUT_ROWS + CNTR_NAME_GBY_96_NUM_INPUT_ROWS - CNTR_NAME_GBY_98_NUM_OUTPUT_ROWS + CNTR_NAME_GBY_96_NUM_OUTPUT_ROWS - CNTR_NAME_GBY_98_TIME_TAKEN + CNTR_NAME_GBY_96_TIME_TAKEN - CNTR_NAME_GBY_98_FATAL_ERROR + CNTR_NAME_GBY_96_FATAL_ERROR - GBY_98 + GBY_96 Index: ql/src/test/results/compiler/plan/input1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input1.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input1.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060029 + 1310382293 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_114_NUM_INPUT_ROWS + CNTR_NAME_FS_112_NUM_INPUT_ROWS - CNTR_NAME_FS_114_NUM_OUTPUT_ROWS + CNTR_NAME_FS_112_NUM_OUTPUT_ROWS - CNTR_NAME_FS_114_TIME_TAKEN + CNTR_NAME_FS_112_TIME_TAKEN - 
CNTR_NAME_FS_114_FATAL_ERROR + CNTR_NAME_FS_112_FATAL_ERROR - FS_114 + FS_112 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_113_NUM_INPUT_ROWS + CNTR_NAME_TS_111_NUM_INPUT_ROWS - CNTR_NAME_TS_113_NUM_OUTPUT_ROWS + CNTR_NAME_TS_111_NUM_OUTPUT_ROWS - CNTR_NAME_TS_113_TIME_TAKEN + CNTR_NAME_TS_111_TIME_TAKEN - CNTR_NAME_TS_113_FATAL_ERROR + CNTR_NAME_TS_111_FATAL_ERROR - TS_113 + TS_111 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060029 + 1310382293 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10002 @@ -524,11 +524,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060027 + 1310382292 @@ -586,11 +586,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060027 + 1310382292 @@ -614,260 +614,143 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-53-49_576_2888243981035876529/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_111_NUM_INPUT_ROWS - - 
- CNTR_NAME_FS_111_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_111_TIME_TAKEN - - - CNTR_NAME_FS_111_FATAL_ERROR - - - - - FS_111 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - src - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10002 - - _col0 - - - key - - - src - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-04-53_288_7531718006958804873/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_110_NUM_INPUT_ROWS + CNTR_NAME_FS_109_NUM_INPUT_ROWS - CNTR_NAME_SEL_110_NUM_OUTPUT_ROWS + CNTR_NAME_FS_109_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_110_TIME_TAKEN + CNTR_NAME_FS_109_TIME_TAKEN - CNTR_NAME_SEL_110_FATAL_ERROR + CNTR_NAME_FS_109_FATAL_ERROR - SEL_110 + FS_109 - + - - - - - - - _col0 - - - src - - - - - - - - - - _col1 - - - src - - - - - - - - - + - - - - - - - - - - key - - - src - - - - - - - - - - - - int - - - - - 100 - - - - + + + + _col1 + + + value - - + + src - - - boolean - - + + + _col0 + + + key + + + src + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_109_NUM_INPUT_ROWS + CNTR_NAME_SEL_108_NUM_INPUT_ROWS - CNTR_NAME_FIL_109_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_108_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_109_TIME_TAKEN + CNTR_NAME_SEL_108_TIME_TAKEN - CNTR_NAME_FIL_109_FATAL_ERROR + CNTR_NAME_SEL_108_FATAL_ERROR - FIL_109 + SEL_108 @@ -881,9 +764,9 @@ - + - key + _col0 src @@ -894,9 +777,9 @@ - + - value + _col1 src @@ -936,7 +819,11 @@ - + + + int + + 100 @@ -949,7 +836,11 @@ - + + + boolean + + @@ -958,21 +849,21 @@ - CNTR_NAME_FIL_112_NUM_INPUT_ROWS + CNTR_NAME_FIL_110_NUM_INPUT_ROWS - CNTR_NAME_FIL_112_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_110_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_112_TIME_TAKEN + CNTR_NAME_FIL_110_TIME_TAKEN - CNTR_NAME_FIL_112_FATAL_ERROR + CNTR_NAME_FIL_110_FATAL_ERROR - FIL_112 + FIL_110 @@ -986,10 +877,30 @@ - + + + key + + + src + + + + + - + + + value + + + src + + + + + @@ -1048,16 +959,16 @@ - CNTR_NAME_TS_108_NUM_INPUT_ROWS + CNTR_NAME_TS_106_NUM_INPUT_ROWS - CNTR_NAME_TS_108_NUM_OUTPUT_ROWS + CNTR_NAME_TS_106_NUM_OUTPUT_ROWS - CNTR_NAME_TS_108_TIME_TAKEN + CNTR_NAME_TS_106_TIME_TAKEN - CNTR_NAME_TS_108_FATAL_ERROR + CNTR_NAME_TS_106_FATAL_ERROR @@ -1072,7 +983,7 @@ - TS_108 + TS_106 @@ -1094,7 +1005,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -1106,7 +1017,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -1163,11 +1074,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060027 + 1310382292 @@ -1225,11 +1136,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060027 + 1310382292 Index: ql/src/test/results/compiler/plan/input2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input2.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input2.q.xml (working copy) @@ -159,21 +159,21 @@ - CNTR_NAME_FS_133_NUM_INPUT_ROWS + CNTR_NAME_FS_130_NUM_INPUT_ROWS - 
CNTR_NAME_FS_133_NUM_OUTPUT_ROWS + CNTR_NAME_FS_130_NUM_OUTPUT_ROWS - CNTR_NAME_FS_133_TIME_TAKEN + CNTR_NAME_FS_130_TIME_TAKEN - CNTR_NAME_FS_133_FATAL_ERROR + CNTR_NAME_FS_130_FATAL_ERROR - FS_133 + FS_130 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_132_NUM_INPUT_ROWS + CNTR_NAME_TS_129_NUM_INPUT_ROWS - CNTR_NAME_TS_132_NUM_OUTPUT_ROWS + CNTR_NAME_TS_129_NUM_OUTPUT_ROWS - CNTR_NAME_TS_132_TIME_TAKEN + CNTR_NAME_TS_129_TIME_TAKEN - CNTR_NAME_TS_132_FATAL_ERROR + CNTR_NAME_TS_129_FATAL_ERROR - TS_132 + TS_129 @@ -617,21 +617,21 @@ - CNTR_NAME_FS_135_NUM_INPUT_ROWS + CNTR_NAME_FS_132_NUM_INPUT_ROWS - CNTR_NAME_FS_135_NUM_OUTPUT_ROWS + CNTR_NAME_FS_132_NUM_OUTPUT_ROWS - CNTR_NAME_FS_135_TIME_TAKEN + CNTR_NAME_FS_132_TIME_TAKEN - CNTR_NAME_FS_135_FATAL_ERROR + CNTR_NAME_FS_132_FATAL_ERROR - FS_135 + FS_132 @@ -681,21 +681,21 @@ - CNTR_NAME_TS_134_NUM_INPUT_ROWS + CNTR_NAME_TS_131_NUM_INPUT_ROWS - CNTR_NAME_TS_134_NUM_OUTPUT_ROWS + CNTR_NAME_TS_131_NUM_OUTPUT_ROWS - CNTR_NAME_TS_134_TIME_TAKEN + CNTR_NAME_TS_131_TIME_TAKEN - CNTR_NAME_TS_134_FATAL_ERROR + CNTR_NAME_TS_131_FATAL_ERROR - TS_134 + TS_131 @@ -1075,21 +1075,21 @@ - CNTR_NAME_FS_137_NUM_INPUT_ROWS + CNTR_NAME_FS_134_NUM_INPUT_ROWS - CNTR_NAME_FS_137_NUM_OUTPUT_ROWS + CNTR_NAME_FS_134_NUM_OUTPUT_ROWS - CNTR_NAME_FS_137_TIME_TAKEN + CNTR_NAME_FS_134_TIME_TAKEN - CNTR_NAME_FS_137_FATAL_ERROR + CNTR_NAME_FS_134_FATAL_ERROR - FS_137 + FS_134 @@ -1139,21 +1139,21 @@ - CNTR_NAME_TS_136_NUM_INPUT_ROWS + CNTR_NAME_TS_133_NUM_INPUT_ROWS - CNTR_NAME_TS_136_NUM_OUTPUT_ROWS + CNTR_NAME_TS_133_NUM_OUTPUT_ROWS - CNTR_NAME_TS_136_TIME_TAKEN + CNTR_NAME_TS_133_TIME_TAKEN - CNTR_NAME_TS_136_FATAL_ERROR + CNTR_NAME_TS_133_FATAL_ERROR - TS_136 + TS_133 @@ -1568,21 +1568,21 @@ - CNTR_NAME_FS_125_NUM_INPUT_ROWS + CNTR_NAME_FS_122_NUM_INPUT_ROWS - CNTR_NAME_FS_125_NUM_OUTPUT_ROWS + CNTR_NAME_FS_122_NUM_OUTPUT_ROWS - CNTR_NAME_FS_125_TIME_TAKEN + CNTR_NAME_FS_122_TIME_TAKEN - CNTR_NAME_FS_125_FATAL_ERROR + CNTR_NAME_FS_122_FATAL_ERROR - FS_125 + FS_122 @@ -1660,21 +1660,21 @@ - CNTR_NAME_SEL_124_NUM_INPUT_ROWS + CNTR_NAME_SEL_121_NUM_INPUT_ROWS - CNTR_NAME_SEL_124_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_121_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_124_TIME_TAKEN + CNTR_NAME_SEL_121_TIME_TAKEN - CNTR_NAME_SEL_124_FATAL_ERROR + CNTR_NAME_SEL_121_FATAL_ERROR - SEL_124 + SEL_121 @@ -1773,21 +1773,21 @@ - CNTR_NAME_FIL_123_NUM_INPUT_ROWS + CNTR_NAME_FIL_120_NUM_INPUT_ROWS - CNTR_NAME_FIL_123_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_120_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_123_TIME_TAKEN + CNTR_NAME_FIL_120_TIME_TAKEN - CNTR_NAME_FIL_123_FATAL_ERROR + CNTR_NAME_FIL_120_FATAL_ERROR - FIL_123 + FIL_120 @@ -1870,21 +1870,21 @@ - CNTR_NAME_FS_128_NUM_INPUT_ROWS + CNTR_NAME_FS_125_NUM_INPUT_ROWS - CNTR_NAME_FS_128_NUM_OUTPUT_ROWS + CNTR_NAME_FS_125_NUM_OUTPUT_ROWS - CNTR_NAME_FS_128_TIME_TAKEN + CNTR_NAME_FS_125_TIME_TAKEN - CNTR_NAME_FS_128_FATAL_ERROR + CNTR_NAME_FS_125_FATAL_ERROR - FS_128 + FS_125 @@ -1959,21 +1959,21 @@ - CNTR_NAME_SEL_127_NUM_INPUT_ROWS + CNTR_NAME_SEL_124_NUM_INPUT_ROWS - CNTR_NAME_SEL_127_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_124_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_127_TIME_TAKEN + CNTR_NAME_SEL_124_TIME_TAKEN - CNTR_NAME_SEL_127_FATAL_ERROR + CNTR_NAME_SEL_124_FATAL_ERROR - SEL_127 + SEL_124 @@ -2115,21 +2115,21 @@ - CNTR_NAME_FIL_126_NUM_INPUT_ROWS + CNTR_NAME_FIL_123_NUM_INPUT_ROWS - CNTR_NAME_FIL_126_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_123_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_126_TIME_TAKEN + CNTR_NAME_FIL_123_TIME_TAKEN - CNTR_NAME_FIL_126_FATAL_ERROR + CNTR_NAME_FIL_123_FATAL_ERROR - FIL_126 
+ FIL_123 @@ -2195,21 +2195,21 @@ - CNTR_NAME_FS_131_NUM_INPUT_ROWS + CNTR_NAME_FS_128_NUM_INPUT_ROWS - CNTR_NAME_FS_131_NUM_OUTPUT_ROWS + CNTR_NAME_FS_128_NUM_OUTPUT_ROWS - CNTR_NAME_FS_131_TIME_TAKEN + CNTR_NAME_FS_128_TIME_TAKEN - CNTR_NAME_FS_131_FATAL_ERROR + CNTR_NAME_FS_128_FATAL_ERROR - FS_131 + FS_128 @@ -2281,21 +2281,21 @@ - CNTR_NAME_SEL_130_NUM_INPUT_ROWS + CNTR_NAME_SEL_127_NUM_INPUT_ROWS - CNTR_NAME_SEL_130_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_127_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_130_TIME_TAKEN + CNTR_NAME_SEL_127_TIME_TAKEN - CNTR_NAME_SEL_130_FATAL_ERROR + CNTR_NAME_SEL_127_FATAL_ERROR - SEL_130 + SEL_127 @@ -2383,21 +2383,21 @@ - CNTR_NAME_FIL_129_NUM_INPUT_ROWS + CNTR_NAME_FIL_126_NUM_INPUT_ROWS - CNTR_NAME_FIL_129_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_126_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_129_TIME_TAKEN + CNTR_NAME_FIL_126_TIME_TAKEN - CNTR_NAME_FIL_129_FATAL_ERROR + CNTR_NAME_FIL_126_FATAL_ERROR - FIL_129 + FIL_126 @@ -2434,16 +2434,16 @@ - CNTR_NAME_TS_122_NUM_INPUT_ROWS + CNTR_NAME_TS_119_NUM_INPUT_ROWS - CNTR_NAME_TS_122_NUM_OUTPUT_ROWS + CNTR_NAME_TS_119_NUM_OUTPUT_ROWS - CNTR_NAME_TS_122_TIME_TAKEN + CNTR_NAME_TS_119_TIME_TAKEN - CNTR_NAME_TS_122_FATAL_ERROR + CNTR_NAME_TS_119_FATAL_ERROR @@ -2458,7 +2458,7 @@ - TS_122 + TS_119 Index: ql/src/test/results/compiler/plan/input20.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input20.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input20.q.xml (working copy) @@ -319,21 +319,21 @@ - CNTR_NAME_RS_157_NUM_INPUT_ROWS + CNTR_NAME_RS_154_NUM_INPUT_ROWS - CNTR_NAME_RS_157_NUM_OUTPUT_ROWS + CNTR_NAME_RS_154_NUM_OUTPUT_ROWS - CNTR_NAME_RS_157_TIME_TAKEN + CNTR_NAME_RS_154_TIME_TAKEN - CNTR_NAME_RS_157_FATAL_ERROR + CNTR_NAME_RS_154_FATAL_ERROR - RS_157 + RS_154 @@ -498,21 +498,21 @@ - CNTR_NAME_SCR_156_NUM_INPUT_ROWS + CNTR_NAME_SCR_153_NUM_INPUT_ROWS - CNTR_NAME_SCR_156_NUM_OUTPUT_ROWS + CNTR_NAME_SCR_153_NUM_OUTPUT_ROWS - CNTR_NAME_SCR_156_TIME_TAKEN + CNTR_NAME_SCR_153_TIME_TAKEN - CNTR_NAME_SCR_156_FATAL_ERROR + CNTR_NAME_SCR_153_FATAL_ERROR - SCR_156 + SCR_153 @@ -667,21 +667,21 @@ - CNTR_NAME_SEL_155_NUM_INPUT_ROWS + CNTR_NAME_SEL_152_NUM_INPUT_ROWS - CNTR_NAME_SEL_155_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_152_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_155_TIME_TAKEN + CNTR_NAME_SEL_152_TIME_TAKEN - CNTR_NAME_SEL_155_FATAL_ERROR + CNTR_NAME_SEL_152_FATAL_ERROR - SEL_155 + SEL_152 @@ -735,16 +735,16 @@ - CNTR_NAME_TS_154_NUM_INPUT_ROWS + CNTR_NAME_TS_151_NUM_INPUT_ROWS - CNTR_NAME_TS_154_NUM_OUTPUT_ROWS + CNTR_NAME_TS_151_NUM_OUTPUT_ROWS - CNTR_NAME_TS_154_TIME_TAKEN + CNTR_NAME_TS_151_TIME_TAKEN - CNTR_NAME_TS_154_FATAL_ERROR + CNTR_NAME_TS_151_FATAL_ERROR @@ -756,7 +756,7 @@ - TS_154 + TS_151 @@ -1054,21 +1054,21 @@ - CNTR_NAME_FS_161_NUM_INPUT_ROWS + CNTR_NAME_FS_158_NUM_INPUT_ROWS - CNTR_NAME_FS_161_NUM_OUTPUT_ROWS + CNTR_NAME_FS_158_NUM_OUTPUT_ROWS - CNTR_NAME_FS_161_TIME_TAKEN + CNTR_NAME_FS_158_TIME_TAKEN - CNTR_NAME_FS_161_FATAL_ERROR + CNTR_NAME_FS_158_FATAL_ERROR - FS_161 + FS_158 @@ -1229,21 +1229,21 @@ - CNTR_NAME_SCR_160_NUM_INPUT_ROWS + CNTR_NAME_SCR_157_NUM_INPUT_ROWS - CNTR_NAME_SCR_160_NUM_OUTPUT_ROWS + CNTR_NAME_SCR_157_NUM_OUTPUT_ROWS - CNTR_NAME_SCR_160_TIME_TAKEN + CNTR_NAME_SCR_157_TIME_TAKEN - CNTR_NAME_SCR_160_FATAL_ERROR + CNTR_NAME_SCR_157_FATAL_ERROR - SCR_160 + SCR_157 @@ -1349,21 +1349,21 @@ - CNTR_NAME_SEL_159_NUM_INPUT_ROWS + CNTR_NAME_SEL_156_NUM_INPUT_ROWS - CNTR_NAME_SEL_159_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_156_NUM_OUTPUT_ROWS - 
CNTR_NAME_SEL_159_TIME_TAKEN + CNTR_NAME_SEL_156_TIME_TAKEN - CNTR_NAME_SEL_159_FATAL_ERROR + CNTR_NAME_SEL_156_FATAL_ERROR - SEL_159 + SEL_156 @@ -1424,21 +1424,21 @@ - CNTR_NAME_OP_158_NUM_INPUT_ROWS + CNTR_NAME_OP_155_NUM_INPUT_ROWS - CNTR_NAME_OP_158_NUM_OUTPUT_ROWS + CNTR_NAME_OP_155_NUM_OUTPUT_ROWS - CNTR_NAME_OP_158_TIME_TAKEN + CNTR_NAME_OP_155_TIME_TAKEN - CNTR_NAME_OP_158_FATAL_ERROR + CNTR_NAME_OP_155_FATAL_ERROR - OP_158 + OP_155 Index: ql/src/test/results/compiler/plan/input3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input3.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input3.q.xml (working copy) @@ -159,21 +159,21 @@ - CNTR_NAME_FS_184_NUM_INPUT_ROWS + CNTR_NAME_FS_181_NUM_INPUT_ROWS - CNTR_NAME_FS_184_NUM_OUTPUT_ROWS + CNTR_NAME_FS_181_NUM_OUTPUT_ROWS - CNTR_NAME_FS_184_TIME_TAKEN + CNTR_NAME_FS_181_TIME_TAKEN - CNTR_NAME_FS_184_FATAL_ERROR + CNTR_NAME_FS_181_FATAL_ERROR - FS_184 + FS_181 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_183_NUM_INPUT_ROWS + CNTR_NAME_TS_180_NUM_INPUT_ROWS - CNTR_NAME_TS_183_NUM_OUTPUT_ROWS + CNTR_NAME_TS_180_NUM_OUTPUT_ROWS - CNTR_NAME_TS_183_TIME_TAKEN + CNTR_NAME_TS_180_TIME_TAKEN - CNTR_NAME_TS_183_FATAL_ERROR + CNTR_NAME_TS_180_FATAL_ERROR - TS_183 + TS_180 @@ -617,21 +617,21 @@ - CNTR_NAME_FS_186_NUM_INPUT_ROWS + CNTR_NAME_FS_183_NUM_INPUT_ROWS - CNTR_NAME_FS_186_NUM_OUTPUT_ROWS + CNTR_NAME_FS_183_NUM_OUTPUT_ROWS - CNTR_NAME_FS_186_TIME_TAKEN + CNTR_NAME_FS_183_TIME_TAKEN - CNTR_NAME_FS_186_FATAL_ERROR + CNTR_NAME_FS_183_FATAL_ERROR - FS_186 + FS_183 @@ -681,21 +681,21 @@ - CNTR_NAME_TS_185_NUM_INPUT_ROWS + CNTR_NAME_TS_182_NUM_INPUT_ROWS - CNTR_NAME_TS_185_NUM_OUTPUT_ROWS + CNTR_NAME_TS_182_NUM_OUTPUT_ROWS - CNTR_NAME_TS_185_TIME_TAKEN + CNTR_NAME_TS_182_TIME_TAKEN - CNTR_NAME_TS_185_FATAL_ERROR + CNTR_NAME_TS_182_FATAL_ERROR - TS_185 + TS_182 @@ -1075,21 +1075,21 @@ - CNTR_NAME_FS_188_NUM_INPUT_ROWS + CNTR_NAME_FS_185_NUM_INPUT_ROWS - CNTR_NAME_FS_188_NUM_OUTPUT_ROWS + CNTR_NAME_FS_185_NUM_OUTPUT_ROWS - CNTR_NAME_FS_188_TIME_TAKEN + CNTR_NAME_FS_185_TIME_TAKEN - CNTR_NAME_FS_188_FATAL_ERROR + CNTR_NAME_FS_185_FATAL_ERROR - FS_188 + FS_185 @@ -1139,21 +1139,21 @@ - CNTR_NAME_TS_187_NUM_INPUT_ROWS + CNTR_NAME_TS_184_NUM_INPUT_ROWS - CNTR_NAME_TS_187_NUM_OUTPUT_ROWS + CNTR_NAME_TS_184_NUM_OUTPUT_ROWS - CNTR_NAME_TS_187_TIME_TAKEN + CNTR_NAME_TS_184_TIME_TAKEN - CNTR_NAME_TS_187_FATAL_ERROR + CNTR_NAME_TS_184_FATAL_ERROR - TS_187 + TS_184 @@ -1471,21 +1471,21 @@ - CNTR_NAME_FS_190_NUM_INPUT_ROWS + CNTR_NAME_FS_187_NUM_INPUT_ROWS - CNTR_NAME_FS_190_NUM_OUTPUT_ROWS + CNTR_NAME_FS_187_NUM_OUTPUT_ROWS - CNTR_NAME_FS_190_TIME_TAKEN + CNTR_NAME_FS_187_TIME_TAKEN - CNTR_NAME_FS_190_FATAL_ERROR + CNTR_NAME_FS_187_FATAL_ERROR - FS_190 + FS_187 @@ -1522,21 +1522,21 @@ - CNTR_NAME_TS_189_NUM_INPUT_ROWS + CNTR_NAME_TS_186_NUM_INPUT_ROWS - CNTR_NAME_TS_189_NUM_OUTPUT_ROWS + CNTR_NAME_TS_186_NUM_OUTPUT_ROWS - CNTR_NAME_TS_189_TIME_TAKEN + CNTR_NAME_TS_186_TIME_TAKEN - CNTR_NAME_TS_189_FATAL_ERROR + CNTR_NAME_TS_186_FATAL_ERROR - TS_189 + TS_186 @@ -1904,21 +1904,21 @@ - CNTR_NAME_FS_173_NUM_INPUT_ROWS + CNTR_NAME_FS_170_NUM_INPUT_ROWS - CNTR_NAME_FS_173_NUM_OUTPUT_ROWS + CNTR_NAME_FS_170_NUM_OUTPUT_ROWS - CNTR_NAME_FS_173_TIME_TAKEN + CNTR_NAME_FS_170_TIME_TAKEN - CNTR_NAME_FS_173_FATAL_ERROR + CNTR_NAME_FS_170_FATAL_ERROR - FS_173 + FS_170 @@ -1996,21 +1996,21 @@ - CNTR_NAME_SEL_172_NUM_INPUT_ROWS + CNTR_NAME_SEL_169_NUM_INPUT_ROWS - CNTR_NAME_SEL_172_NUM_OUTPUT_ROWS + 
CNTR_NAME_SEL_169_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_172_TIME_TAKEN + CNTR_NAME_SEL_169_TIME_TAKEN - CNTR_NAME_SEL_172_FATAL_ERROR + CNTR_NAME_SEL_169_FATAL_ERROR - SEL_172 + SEL_169 @@ -2109,21 +2109,21 @@ - CNTR_NAME_FIL_171_NUM_INPUT_ROWS + CNTR_NAME_FIL_168_NUM_INPUT_ROWS - CNTR_NAME_FIL_171_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_168_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_171_TIME_TAKEN + CNTR_NAME_FIL_168_TIME_TAKEN - CNTR_NAME_FIL_171_FATAL_ERROR + CNTR_NAME_FIL_168_FATAL_ERROR - FIL_171 + FIL_168 @@ -2206,21 +2206,21 @@ - CNTR_NAME_FS_176_NUM_INPUT_ROWS + CNTR_NAME_FS_173_NUM_INPUT_ROWS - CNTR_NAME_FS_176_NUM_OUTPUT_ROWS + CNTR_NAME_FS_173_NUM_OUTPUT_ROWS - CNTR_NAME_FS_176_TIME_TAKEN + CNTR_NAME_FS_173_TIME_TAKEN - CNTR_NAME_FS_176_FATAL_ERROR + CNTR_NAME_FS_173_FATAL_ERROR - FS_176 + FS_173 @@ -2295,21 +2295,21 @@ - CNTR_NAME_SEL_175_NUM_INPUT_ROWS + CNTR_NAME_SEL_172_NUM_INPUT_ROWS - CNTR_NAME_SEL_175_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_172_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_175_TIME_TAKEN + CNTR_NAME_SEL_172_TIME_TAKEN - CNTR_NAME_SEL_175_FATAL_ERROR + CNTR_NAME_SEL_172_FATAL_ERROR - SEL_175 + SEL_172 @@ -2451,21 +2451,21 @@ - CNTR_NAME_FIL_174_NUM_INPUT_ROWS + CNTR_NAME_FIL_171_NUM_INPUT_ROWS - CNTR_NAME_FIL_174_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_171_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_174_TIME_TAKEN + CNTR_NAME_FIL_171_TIME_TAKEN - CNTR_NAME_FIL_174_FATAL_ERROR + CNTR_NAME_FIL_171_FATAL_ERROR - FIL_174 + FIL_171 @@ -2531,21 +2531,21 @@ - CNTR_NAME_FS_179_NUM_INPUT_ROWS + CNTR_NAME_FS_176_NUM_INPUT_ROWS - CNTR_NAME_FS_179_NUM_OUTPUT_ROWS + CNTR_NAME_FS_176_NUM_OUTPUT_ROWS - CNTR_NAME_FS_179_TIME_TAKEN + CNTR_NAME_FS_176_TIME_TAKEN - CNTR_NAME_FS_179_FATAL_ERROR + CNTR_NAME_FS_176_FATAL_ERROR - FS_179 + FS_176 @@ -2617,21 +2617,21 @@ - CNTR_NAME_SEL_178_NUM_INPUT_ROWS + CNTR_NAME_SEL_175_NUM_INPUT_ROWS - CNTR_NAME_SEL_178_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_175_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_178_TIME_TAKEN + CNTR_NAME_SEL_175_TIME_TAKEN - CNTR_NAME_SEL_178_FATAL_ERROR + CNTR_NAME_SEL_175_FATAL_ERROR - SEL_178 + SEL_175 @@ -2770,21 +2770,21 @@ - CNTR_NAME_FIL_177_NUM_INPUT_ROWS + CNTR_NAME_FIL_174_NUM_INPUT_ROWS - CNTR_NAME_FIL_177_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_174_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_177_TIME_TAKEN + CNTR_NAME_FIL_174_TIME_TAKEN - CNTR_NAME_FIL_177_FATAL_ERROR + CNTR_NAME_FIL_174_FATAL_ERROR - FIL_177 + FIL_174 @@ -2841,21 +2841,21 @@ - CNTR_NAME_FS_182_NUM_INPUT_ROWS + CNTR_NAME_FS_179_NUM_INPUT_ROWS - CNTR_NAME_FS_182_NUM_OUTPUT_ROWS + CNTR_NAME_FS_179_NUM_OUTPUT_ROWS - CNTR_NAME_FS_182_TIME_TAKEN + CNTR_NAME_FS_179_TIME_TAKEN - CNTR_NAME_FS_182_FATAL_ERROR + CNTR_NAME_FS_179_FATAL_ERROR - FS_182 + FS_179 @@ -2910,21 +2910,21 @@ - CNTR_NAME_SEL_181_NUM_INPUT_ROWS + CNTR_NAME_SEL_178_NUM_INPUT_ROWS - CNTR_NAME_SEL_181_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_178_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_181_TIME_TAKEN + CNTR_NAME_SEL_178_TIME_TAKEN - CNTR_NAME_SEL_181_FATAL_ERROR + CNTR_NAME_SEL_178_FATAL_ERROR - SEL_181 + SEL_178 @@ -3005,21 +3005,21 @@ - CNTR_NAME_FIL_180_NUM_INPUT_ROWS + CNTR_NAME_FIL_177_NUM_INPUT_ROWS - CNTR_NAME_FIL_180_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_177_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_180_TIME_TAKEN + CNTR_NAME_FIL_177_TIME_TAKEN - CNTR_NAME_FIL_180_FATAL_ERROR + CNTR_NAME_FIL_177_FATAL_ERROR - FIL_180 + FIL_177 @@ -3059,16 +3059,16 @@ - CNTR_NAME_TS_170_NUM_INPUT_ROWS + CNTR_NAME_TS_167_NUM_INPUT_ROWS - CNTR_NAME_TS_170_NUM_OUTPUT_ROWS + CNTR_NAME_TS_167_NUM_OUTPUT_ROWS - CNTR_NAME_TS_170_TIME_TAKEN + CNTR_NAME_TS_167_TIME_TAKEN - CNTR_NAME_TS_170_FATAL_ERROR + CNTR_NAME_TS_167_FATAL_ERROR 
@@ -3083,7 +3083,7 @@ - TS_170 + TS_167 Index: ql/src/test/results/compiler/plan/input4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input4.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input4.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -22,7 +22,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-25_717_5787624435007958970/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10000/ @@ -58,7 +58,7 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-25_717_5787624435007958970/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10000 @@ -111,11 +111,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060065 + 1310382308 @@ -125,7 +125,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-25_717_5787624435007958970/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10001 @@ -196,11 +196,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060063 + 1310382307 @@ -258,11 +258,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060063 + 1310382307 @@ -290,160 +290,274 @@ - - - - - _col1 - - - _col1 - - - - - string + + + + + + + + + _col1 + + + _col1 + + + + + string + + + + + + _col0 + + + _col0 + + + + + + - - - - _col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 + + + + - - + + + + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + reducesinkkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + + + reducesinkkey0 + + + + + + + _col0 + + + _col1 + + + + + + + + + _col0 + + + + + + + + + + -1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - reducesinkkey0 + + + + CNTR_NAME_RS_212_NUM_INPUT_ROWS - - serialization.sort.order - + + + CNTR_NAME_RS_212_NUM_OUTPUT_ROWS - - columns.types - string + + CNTR_NAME_RS_212_TIME_TAKEN + + CNTR_NAME_RS_212_FATAL_ERROR + - - - - 1 - - - -1 - - - - - reducesinkkey0 + + RS_212 - - - - - - _col0 + + + + + + - - _col1 + + + + + + + + tkey + + + _col0 + + + + + + + + + + tvalue + + + _col1 + + + + + + + + + - - - - - - _col0 + + + + + + + + + + + + _col0 + + + + + - - + + + + + + int + + + + + 100 + + - - - - -1 - - - - - + 
+ - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 + + + + boolean - - columns.types - string,string - - - escape.delim - \ - @@ -453,21 +567,21 @@ - CNTR_NAME_RS_215_NUM_INPUT_ROWS + CNTR_NAME_FIL_217_NUM_INPUT_ROWS - CNTR_NAME_RS_215_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_217_NUM_OUTPUT_ROWS - CNTR_NAME_RS_215_TIME_TAKEN + CNTR_NAME_FIL_217_TIME_TAKEN - CNTR_NAME_RS_215_FATAL_ERROR + CNTR_NAME_FIL_217_FATAL_ERROR - RS_215 + FIL_217 @@ -479,34 +593,7 @@ - - - - - tkey - - - _col0 - - - - - - - - - - tvalue - - - _col1 - - - - - - - + @@ -628,21 +715,21 @@ - CNTR_NAME_SCR_214_NUM_INPUT_ROWS + CNTR_NAME_SCR_211_NUM_INPUT_ROWS - CNTR_NAME_SCR_214_NUM_OUTPUT_ROWS + CNTR_NAME_SCR_211_NUM_OUTPUT_ROWS - CNTR_NAME_SCR_214_TIME_TAKEN + CNTR_NAME_SCR_211_TIME_TAKEN - CNTR_NAME_SCR_214_FATAL_ERROR + CNTR_NAME_SCR_211_FATAL_ERROR - SCR_214 + SCR_211 @@ -721,21 +808,21 @@ - CNTR_NAME_SEL_213_NUM_INPUT_ROWS + CNTR_NAME_SEL_210_NUM_INPUT_ROWS - CNTR_NAME_SEL_213_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_210_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_213_TIME_TAKEN + CNTR_NAME_SEL_210_TIME_TAKEN - CNTR_NAME_SEL_213_FATAL_ERROR + CNTR_NAME_SEL_210_FATAL_ERROR - SEL_213 + SEL_210 @@ -789,16 +876,16 @@ - CNTR_NAME_TS_212_NUM_INPUT_ROWS + CNTR_NAME_TS_209_NUM_INPUT_ROWS - CNTR_NAME_TS_212_NUM_OUTPUT_ROWS + CNTR_NAME_TS_209_NUM_OUTPUT_ROWS - CNTR_NAME_TS_212_TIME_TAKEN + CNTR_NAME_TS_209_TIME_TAKEN - CNTR_NAME_TS_212_FATAL_ERROR + CNTR_NAME_TS_209_FATAL_ERROR @@ -813,7 +900,7 @@ - TS_212 + TS_209 @@ -904,7 +991,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src tmap:src @@ -916,7 +1003,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -973,11 +1060,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060063 + 1310382307 @@ -1035,11 +1122,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060063 + 1310382307 @@ -1057,183 +1144,59 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-25_717_5787624435007958970/-ext-10000 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-25_717_5787624435007958970/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_219_NUM_INPUT_ROWS - - - CNTR_NAME_FS_219_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_219_TIME_TAKEN - - - CNTR_NAME_FS_219_FATAL_ERROR - - - - - FS_219 - - - - - - - - - - - - - - - - key - - - - - - - - - - - - - value - - - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - _col1 - - - tmap - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10000 - - _col0 - - - _col0 - - - tmap - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-08_392_4021619949206592501/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_218_NUM_INPUT_ROWS + 
CNTR_NAME_FS_216_NUM_INPUT_ROWS - CNTR_NAME_SEL_218_NUM_OUTPUT_ROWS + CNTR_NAME_FS_216_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_218_TIME_TAKEN + CNTR_NAME_FS_216_TIME_TAKEN - CNTR_NAME_SEL_218_FATAL_ERROR + CNTR_NAME_FS_216_FATAL_ERROR - SEL_218 + FS_216 - + @@ -1244,10 +1207,10 @@ - _col0 + key - tmap + @@ -1257,10 +1220,10 @@ - _col1 + value - tmap + @@ -1275,73 +1238,80 @@ - - - - - - - - - - _col0 - - - tmap - - - - - - - - - - - - int - - - - - 100 - - - - + + + + _col1 + + + _col1 - - + + tmap - - - boolean - - + + + _col0 + + + _col0 + + + tmap + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_217_NUM_INPUT_ROWS + CNTR_NAME_SEL_215_NUM_INPUT_ROWS - CNTR_NAME_FIL_217_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_215_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_217_TIME_TAKEN + CNTR_NAME_SEL_215_TIME_TAKEN - CNTR_NAME_FIL_217_FATAL_ERROR + CNTR_NAME_SEL_215_FATAL_ERROR - FIL_217 + SEL_215 @@ -1355,7 +1325,7 @@ - + _col0 @@ -1368,7 +1338,7 @@ - + _col1 @@ -1408,21 +1378,21 @@ - CNTR_NAME_OP_216_NUM_INPUT_ROWS + CNTR_NAME_OP_213_NUM_INPUT_ROWS - CNTR_NAME_OP_216_NUM_OUTPUT_ROWS + CNTR_NAME_OP_213_NUM_OUTPUT_ROWS - CNTR_NAME_OP_216_TIME_TAKEN + CNTR_NAME_OP_213_TIME_TAKEN - CNTR_NAME_OP_216_FATAL_ERROR + CNTR_NAME_OP_213_FATAL_ERROR - OP_216 + OP_213 @@ -1436,10 +1406,30 @@ - + + + _col0 + + + tmap + + + + + - + + + _col1 + + + tmap + + + + + Index: ql/src/test/results/compiler/plan/input5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input5.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input5.q.xml (working copy) @@ -461,21 +461,21 @@ - CNTR_NAME_RS_231_NUM_INPUT_ROWS + CNTR_NAME_RS_229_NUM_INPUT_ROWS - CNTR_NAME_RS_231_NUM_OUTPUT_ROWS + CNTR_NAME_RS_229_NUM_OUTPUT_ROWS - CNTR_NAME_RS_231_TIME_TAKEN + CNTR_NAME_RS_229_TIME_TAKEN - CNTR_NAME_RS_231_FATAL_ERROR + CNTR_NAME_RS_229_FATAL_ERROR - RS_231 + RS_229 @@ -636,21 +636,21 @@ - CNTR_NAME_SCR_230_NUM_INPUT_ROWS + CNTR_NAME_SCR_228_NUM_INPUT_ROWS - CNTR_NAME_SCR_230_NUM_OUTPUT_ROWS + CNTR_NAME_SCR_228_NUM_OUTPUT_ROWS - CNTR_NAME_SCR_230_TIME_TAKEN + CNTR_NAME_SCR_228_TIME_TAKEN - CNTR_NAME_SCR_230_FATAL_ERROR + CNTR_NAME_SCR_228_FATAL_ERROR - SCR_230 + SCR_228 @@ -768,21 +768,21 @@ - CNTR_NAME_SEL_229_NUM_INPUT_ROWS + CNTR_NAME_SEL_227_NUM_INPUT_ROWS - CNTR_NAME_SEL_229_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_227_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_229_TIME_TAKEN + CNTR_NAME_SEL_227_TIME_TAKEN - CNTR_NAME_SEL_229_FATAL_ERROR + CNTR_NAME_SEL_227_FATAL_ERROR - SEL_229 + SEL_227 @@ -836,16 +836,16 @@ - CNTR_NAME_TS_228_NUM_INPUT_ROWS + CNTR_NAME_TS_226_NUM_INPUT_ROWS - CNTR_NAME_TS_228_NUM_OUTPUT_ROWS + CNTR_NAME_TS_226_NUM_OUTPUT_ROWS - CNTR_NAME_TS_228_TIME_TAKEN + CNTR_NAME_TS_226_TIME_TAKEN - CNTR_NAME_TS_228_FATAL_ERROR + CNTR_NAME_TS_226_FATAL_ERROR @@ -860,7 +860,7 @@ - TS_228 + TS_226 @@ -1208,21 +1208,21 @@ - CNTR_NAME_FS_234_NUM_INPUT_ROWS + CNTR_NAME_FS_232_NUM_INPUT_ROWS - CNTR_NAME_FS_234_NUM_OUTPUT_ROWS + CNTR_NAME_FS_232_NUM_OUTPUT_ROWS - CNTR_NAME_FS_234_TIME_TAKEN + CNTR_NAME_FS_232_TIME_TAKEN - CNTR_NAME_FS_234_FATAL_ERROR + CNTR_NAME_FS_232_FATAL_ERROR - FS_234 + FS_232 @@ -1328,21 +1328,21 @@ - CNTR_NAME_SEL_233_NUM_INPUT_ROWS + CNTR_NAME_SEL_231_NUM_INPUT_ROWS - CNTR_NAME_SEL_233_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_231_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_233_TIME_TAKEN + CNTR_NAME_SEL_231_TIME_TAKEN - CNTR_NAME_SEL_233_FATAL_ERROR + CNTR_NAME_SEL_231_FATAL_ERROR - SEL_233 + SEL_231 @@ -1409,21 +1409,21 @@ - CNTR_NAME_OP_232_NUM_INPUT_ROWS + 
CNTR_NAME_OP_230_NUM_INPUT_ROWS - CNTR_NAME_OP_232_NUM_OUTPUT_ROWS + CNTR_NAME_OP_230_NUM_OUTPUT_ROWS - CNTR_NAME_OP_232_TIME_TAKEN + CNTR_NAME_OP_230_TIME_TAKEN - CNTR_NAME_OP_232_FATAL_ERROR + CNTR_NAME_OP_230_FATAL_ERROR - OP_232 + OP_230 Index: ql/src/test/results/compiler/plan/input6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input6.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input6.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060084 + 1310382315 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_248_NUM_INPUT_ROWS + CNTR_NAME_FS_246_NUM_INPUT_ROWS - CNTR_NAME_FS_248_NUM_OUTPUT_ROWS + CNTR_NAME_FS_246_NUM_OUTPUT_ROWS - CNTR_NAME_FS_248_TIME_TAKEN + CNTR_NAME_FS_246_TIME_TAKEN - CNTR_NAME_FS_248_FATAL_ERROR + CNTR_NAME_FS_246_FATAL_ERROR - FS_248 + FS_246 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_247_NUM_INPUT_ROWS + CNTR_NAME_TS_245_NUM_INPUT_ROWS - CNTR_NAME_TS_247_NUM_OUTPUT_ROWS + CNTR_NAME_TS_245_NUM_OUTPUT_ROWS - CNTR_NAME_TS_247_TIME_TAKEN + CNTR_NAME_TS_245_TIME_TAKEN - CNTR_NAME_TS_247_FATAL_ERROR + CNTR_NAME_TS_245_FATAL_ERROR - TS_247 + TS_245 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060084 + 1310382315 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10000 - 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10002 @@ -524,11 +524,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1304060082 + 1310382314 @@ -586,11 +586,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1304060082 + 1310382314 @@ -614,246 +614,143 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-54-44_457_1118129043044323606/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_245_NUM_INPUT_ROWS - - - CNTR_NAME_FS_245_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_245_TIME_TAKEN - - - CNTR_NAME_FS_245_FATAL_ERROR - - - - - FS_245 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - src1 - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10002 - - _col0 - - - key - - - src1 - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-15_593_5091965176340900232/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_244_NUM_INPUT_ROWS + CNTR_NAME_FS_243_NUM_INPUT_ROWS - CNTR_NAME_SEL_244_NUM_OUTPUT_ROWS + CNTR_NAME_FS_243_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_244_TIME_TAKEN + CNTR_NAME_FS_243_TIME_TAKEN - CNTR_NAME_SEL_244_FATAL_ERROR + CNTR_NAME_FS_243_FATAL_ERROR - SEL_244 + FS_243 - + - - - - - - - _col0 - - - src1 - - - - - - - - - - _col1 - - - src1 - - - - - - - - - + - - - - - - - - - - key - - - src1 - - - - - - - + + + + _col1 + + + value - - + + src1 - - - boolean - - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_243_NUM_INPUT_ROWS + CNTR_NAME_SEL_242_NUM_INPUT_ROWS - CNTR_NAME_FIL_243_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_242_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_243_TIME_TAKEN + CNTR_NAME_SEL_242_TIME_TAKEN - CNTR_NAME_FIL_243_FATAL_ERROR + CNTR_NAME_SEL_242_FATAL_ERROR - FIL_243 + SEL_242 @@ -867,9 +764,9 @@ - + - key + _col0 src1 @@ -880,9 +777,9 @@ - + - value + _col1 src1 @@ -925,7 +822,11 @@ - + + + boolean + + @@ -934,21 +835,21 @@ - CNTR_NAME_FIL_246_NUM_INPUT_ROWS + CNTR_NAME_FIL_244_NUM_INPUT_ROWS - 
CNTR_NAME_FIL_246_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_244_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_246_TIME_TAKEN + CNTR_NAME_FIL_244_TIME_TAKEN - CNTR_NAME_FIL_246_FATAL_ERROR + CNTR_NAME_FIL_244_FATAL_ERROR - FIL_246 + FIL_244 @@ -962,10 +863,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -1024,16 +945,16 @@ - CNTR_NAME_TS_242_NUM_INPUT_ROWS + CNTR_NAME_TS_240_NUM_INPUT_ROWS - CNTR_NAME_TS_242_NUM_OUTPUT_ROWS + CNTR_NAME_TS_240_NUM_OUTPUT_ROWS - CNTR_NAME_TS_242_TIME_TAKEN + CNTR_NAME_TS_240_TIME_TAKEN - CNTR_NAME_TS_242_FATAL_ERROR + CNTR_NAME_TS_240_FATAL_ERROR @@ -1048,7 +969,7 @@ - TS_242 + TS_240 @@ -1070,7 +991,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 src1 @@ -1082,7 +1003,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 src1 @@ -1139,11 +1060,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1304060082 + 1310382314 @@ -1201,11 +1122,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1304060082 + 1310382314 Index: ql/src/test/results/compiler/plan/input7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input7.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input7.q.xml (working copy) @@ -159,21 +159,21 @@ - CNTR_NAME_FS_260_NUM_INPUT_ROWS + CNTR_NAME_FS_257_NUM_INPUT_ROWS - CNTR_NAME_FS_260_NUM_OUTPUT_ROWS + CNTR_NAME_FS_257_NUM_OUTPUT_ROWS - CNTR_NAME_FS_260_TIME_TAKEN + CNTR_NAME_FS_257_TIME_TAKEN - CNTR_NAME_FS_260_FATAL_ERROR + CNTR_NAME_FS_257_FATAL_ERROR - FS_260 + FS_257 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_259_NUM_INPUT_ROWS + CNTR_NAME_TS_256_NUM_INPUT_ROWS - CNTR_NAME_TS_259_NUM_OUTPUT_ROWS + CNTR_NAME_TS_256_NUM_OUTPUT_ROWS - CNTR_NAME_TS_259_TIME_TAKEN + CNTR_NAME_TS_256_TIME_TAKEN - CNTR_NAME_TS_259_FATAL_ERROR + CNTR_NAME_TS_256_FATAL_ERROR - TS_259 + TS_256 @@ -643,21 +643,21 @@ - CNTR_NAME_FS_258_NUM_INPUT_ROWS + CNTR_NAME_FS_255_NUM_INPUT_ROWS - CNTR_NAME_FS_258_NUM_OUTPUT_ROWS + CNTR_NAME_FS_255_NUM_OUTPUT_ROWS - CNTR_NAME_FS_258_TIME_TAKEN + CNTR_NAME_FS_255_TIME_TAKEN - CNTR_NAME_FS_258_FATAL_ERROR + CNTR_NAME_FS_255_FATAL_ERROR - FS_258 + FS_255 @@ -726,21 +726,21 @@ - CNTR_NAME_SEL_257_NUM_INPUT_ROWS + CNTR_NAME_SEL_254_NUM_INPUT_ROWS - CNTR_NAME_SEL_257_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_254_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_257_TIME_TAKEN + CNTR_NAME_SEL_254_TIME_TAKEN - CNTR_NAME_SEL_257_FATAL_ERROR + CNTR_NAME_SEL_254_FATAL_ERROR - SEL_257 + SEL_254 @@ -801,16 +801,16 @@ - CNTR_NAME_TS_256_NUM_INPUT_ROWS + CNTR_NAME_TS_253_NUM_INPUT_ROWS - CNTR_NAME_TS_256_NUM_OUTPUT_ROWS + CNTR_NAME_TS_253_NUM_OUTPUT_ROWS - CNTR_NAME_TS_256_TIME_TAKEN + CNTR_NAME_TS_253_TIME_TAKEN - CNTR_NAME_TS_256_FATAL_ERROR + CNTR_NAME_TS_253_FATAL_ERROR @@ -822,7 +822,7 @@ - TS_256 + TS_253 Index: ql/src/test/results/compiler/plan/input8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input8.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input8.q.xml (working copy) @@ -201,21 +201,21 @@ - 
CNTR_NAME_FS_268_NUM_INPUT_ROWS + CNTR_NAME_FS_265_NUM_INPUT_ROWS - CNTR_NAME_FS_268_NUM_OUTPUT_ROWS + CNTR_NAME_FS_265_NUM_OUTPUT_ROWS - CNTR_NAME_FS_268_TIME_TAKEN + CNTR_NAME_FS_265_TIME_TAKEN - CNTR_NAME_FS_268_FATAL_ERROR + CNTR_NAME_FS_265_FATAL_ERROR - FS_268 + FS_265 @@ -438,21 +438,21 @@ - CNTR_NAME_SEL_267_NUM_INPUT_ROWS + CNTR_NAME_SEL_264_NUM_INPUT_ROWS - CNTR_NAME_SEL_267_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_264_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_267_TIME_TAKEN + CNTR_NAME_SEL_264_TIME_TAKEN - CNTR_NAME_SEL_267_FATAL_ERROR + CNTR_NAME_SEL_264_FATAL_ERROR - SEL_267 + SEL_264 @@ -525,16 +525,16 @@ - CNTR_NAME_TS_266_NUM_INPUT_ROWS + CNTR_NAME_TS_263_NUM_INPUT_ROWS - CNTR_NAME_TS_266_NUM_OUTPUT_ROWS + CNTR_NAME_TS_263_NUM_OUTPUT_ROWS - CNTR_NAME_TS_266_TIME_TAKEN + CNTR_NAME_TS_263_TIME_TAKEN - CNTR_NAME_TS_266_FATAL_ERROR + CNTR_NAME_TS_263_FATAL_ERROR @@ -546,7 +546,7 @@ - TS_266 + TS_263 Index: ql/src/test/results/compiler/plan/input9.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input9.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input9.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060113 + 1310382325 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_278_NUM_INPUT_ROWS + CNTR_NAME_FS_275_NUM_INPUT_ROWS - CNTR_NAME_FS_278_NUM_OUTPUT_ROWS + CNTR_NAME_FS_275_NUM_OUTPUT_ROWS - CNTR_NAME_FS_278_TIME_TAKEN + CNTR_NAME_FS_275_TIME_TAKEN - CNTR_NAME_FS_278_FATAL_ERROR + CNTR_NAME_FS_275_FATAL_ERROR - FS_278 + FS_275 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_277_NUM_INPUT_ROWS + CNTR_NAME_TS_274_NUM_INPUT_ROWS - CNTR_NAME_TS_277_NUM_OUTPUT_ROWS + CNTR_NAME_TS_274_NUM_OUTPUT_ROWS - CNTR_NAME_TS_277_TIME_TAKEN + CNTR_NAME_TS_274_TIME_TAKEN - CNTR_NAME_TS_277_FATAL_ERROR + CNTR_NAME_TS_274_FATAL_ERROR - TS_277 + TS_274 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10002 + 
pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060113 + 1310382325 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10002 @@ -524,11 +524,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1304060111 + 1310382325 @@ -586,11 +586,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1304060111 + 1310382325 @@ -614,242 +614,137 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-55-13_445_1153330900988922829/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_275_NUM_INPUT_ROWS - - - CNTR_NAME_FS_275_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_275_TIME_TAKEN - - - CNTR_NAME_FS_275_FATAL_ERROR - - - - - FS_275 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - key - - - src1 - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10002 - - _col0 - - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-05-25_965_5264708180261866853/-ext-10000/ + + + + + 1 + - CNTR_NAME_SEL_274_NUM_INPUT_ROWS + CNTR_NAME_FS_272_NUM_INPUT_ROWS - CNTR_NAME_SEL_274_NUM_OUTPUT_ROWS + CNTR_NAME_FS_272_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_274_TIME_TAKEN + CNTR_NAME_FS_272_TIME_TAKEN - CNTR_NAME_SEL_274_FATAL_ERROR + CNTR_NAME_FS_272_FATAL_ERROR - SEL_274 + FS_272 - + - - - - - - - _col0 - - - - - void - - - - - - - - - _col1 - - - src1 - - - - - - - - - + - - - - - - - - - - - - - - - - - - - - - + + + + _col1 + + + key - - + + src1 - - - boolean - - + + + _col0 
+ + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_273_NUM_INPUT_ROWS + CNTR_NAME_SEL_271_NUM_INPUT_ROWS - CNTR_NAME_FIL_273_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_271_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_273_TIME_TAKEN + CNTR_NAME_SEL_271_TIME_TAKEN - CNTR_NAME_FIL_273_FATAL_ERROR + CNTR_NAME_SEL_271_FATAL_ERROR - FIL_273 + SEL_271 @@ -863,10 +758,24 @@ - + - key + _col0 + + + + void + + + + + + + + + _col1 + src1 @@ -909,7 +818,11 @@ - + + + boolean + + @@ -918,21 +831,21 @@ - CNTR_NAME_FIL_276_NUM_INPUT_ROWS + CNTR_NAME_FIL_273_NUM_INPUT_ROWS - CNTR_NAME_FIL_276_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_273_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_276_TIME_TAKEN + CNTR_NAME_FIL_273_TIME_TAKEN - CNTR_NAME_FIL_276_FATAL_ERROR + CNTR_NAME_FIL_273_FATAL_ERROR - FIL_276 + FIL_273 @@ -946,7 +859,17 @@ - + + + key + + + src1 + + + + + @@ -1018,16 +941,16 @@ - CNTR_NAME_TS_272_NUM_INPUT_ROWS + CNTR_NAME_TS_269_NUM_INPUT_ROWS - CNTR_NAME_TS_272_NUM_OUTPUT_ROWS + CNTR_NAME_TS_269_NUM_OUTPUT_ROWS - CNTR_NAME_TS_272_TIME_TAKEN + CNTR_NAME_TS_269_TIME_TAKEN - CNTR_NAME_TS_272_FATAL_ERROR + CNTR_NAME_TS_269_FATAL_ERROR @@ -1039,7 +962,7 @@ - TS_272 + TS_269 @@ -1061,7 +984,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 src1 @@ -1073,7 +996,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 src1 @@ -1130,11 +1053,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1304060111 + 1310382325 @@ -1192,11 +1115,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src1 transient_lastDdlTime - 1304060111 + 1310382325 Index: ql/src/test/results/compiler/plan/input_part1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_part1.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input_part1.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -75,11 +75,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 transient_lastDdlTime - 1304060114 + 1310382326 @@ -141,11 +141,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart transient_lastDdlTime - 1304060114 + 1310382326 @@ -169,279 +169,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-04-28_23-55-23_051_5537314784500823155/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-04-28_23-55-23_051_5537314784500823155/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3 - - - serialization.format - 1 - - - columns.types - string:string:string:string - - - - - - - 1 - - + + + + + file:/tmp/amarsri/hive_2011-07-11_04-05-29_362_7332883780675641825/-ext-10001 + + + 1 + + + file:/tmp/amarsri/hive_2011-07-11_04-05-29_362_7332883780675641825/-ext-10001/ + + + + + 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - CNTR_NAME_FS_289_NUM_INPUT_ROWS - - - CNTR_NAME_FS_289_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_289_TIME_TAKEN - - - CNTR_NAME_FS_289_FATAL_ERROR - - + + org.apache.hadoop.mapred.TextInputFormat - - FS_289 + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - + + + + columns + _col0,_col1,_col2,_col3 - - - - - - - - - - _col0 - - - - - - - - string - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - + + serialization.format + 1 + + columns.types + string:string:string:string + - - - - - - _col3 - - - ds - - - true - - - srcpart - - - - - + + 1 - - _col2 - - - hr - - - true - - - srcpart - - - - - - - - _col1 - - - value - - - srcpart - - - - - - - - _col0 - - - key - - - srcpart - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - - - - CNTR_NAME_SEL_288_NUM_INPUT_ROWS + CNTR_NAME_FS_285_NUM_INPUT_ROWS - CNTR_NAME_SEL_288_NUM_OUTPUT_ROWS + CNTR_NAME_FS_285_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_288_TIME_TAKEN + CNTR_NAME_FS_285_TIME_TAKEN - CNTR_NAME_SEL_288_FATAL_ERROR + CNTR_NAME_FS_285_FATAL_ERROR - SEL_288 + FS_285 - + @@ -451,30 +251,28 @@ - - key - _col0 - srcpart + - + + + string + + - - value - _col1 - srcpart + @@ -483,14 +281,11 @@ - - hr - _col2 - srcpart + @@ -499,14 +294,11 @@ - - ds - _col3 - srcpart + @@ -521,181 +313,126 @@ - - - - - - - - - - - - - - - - - - key - - - srcpart - - - - - - - - - - - - int - - - - - 100 - - - - - - - - - - - - boolean - - - - - - - - - - - - - ds - - - true - - - srcpart - - - - - - - - - - - - - 2008-04-08 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - hr - - - true - - - srcpart - - - - - - - - - - - - - 12 - - - - - - - - - - - - - - + + + + _col3 + + + ds - - + + true + + srcpart + - + + + _col2 + + + hr + + + true + + + srcpart + + + + + + + + _col1 + + + value + + + srcpart + + + + + + + + _col0 + + + key + + + srcpart + + + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + _col3 + + + + + - CNTR_NAME_FIL_287_NUM_INPUT_ROWS + CNTR_NAME_SEL_284_NUM_INPUT_ROWS - CNTR_NAME_FIL_287_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_284_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_287_TIME_TAKEN + CNTR_NAME_SEL_284_TIME_TAKEN - CNTR_NAME_FIL_287_FATAL_ERROR + CNTR_NAME_SEL_284_FATAL_ERROR - FIL_287 + SEL_284 @@ -709,10 +446,13 @@ - - + + key + + _col0 + srcpart @@ -722,10 +462,13 @@ - - + + value + + _col1 + srcpart @@ -735,9 +478,12 @@ - + + + hr + - ds + _col2 srcpart @@ -748,9 +494,12 @@ - + + + ds + - hr + _col3 srcpart @@ -790,7 +539,11 @@ - + + + int + + 100 @@ -803,7 +556,11 @@ - + + + boolean + + @@ -812,21 +569,21 @@ - CNTR_NAME_FIL_290_NUM_INPUT_ROWS + CNTR_NAME_FIL_286_NUM_INPUT_ROWS - CNTR_NAME_FIL_290_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_286_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_290_TIME_TAKEN + CNTR_NAME_FIL_286_TIME_TAKEN - CNTR_NAME_FIL_290_FATAL_ERROR + CNTR_NAME_FIL_286_FATAL_ERROR - FIL_290 + FIL_286 @@ -840,16 +597,56 @@ - + + + key + + + srcpart + + + + + - + + + value + + + srcpart + + + + + - + + + ds + + + srcpart + + + + + - + + + hr + + + srcpart + + + + + @@ -908,16 +705,16 @@ - CNTR_NAME_TS_286_NUM_INPUT_ROWS + CNTR_NAME_TS_282_NUM_INPUT_ROWS - CNTR_NAME_TS_286_NUM_OUTPUT_ROWS + CNTR_NAME_TS_282_NUM_OUTPUT_ROWS - CNTR_NAME_TS_286_TIME_TAKEN + CNTR_NAME_TS_282_TIME_TAKEN - CNTR_NAME_TS_286_FATAL_ERROR + CNTR_NAME_TS_282_FATAL_ERROR @@ -932,7 +729,7 @@ - TS_286 + TS_282 @@ -951,7 +748,7 @@ - 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 srcpart @@ -963,7 +760,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 hr=12 @@ -1033,11 +830,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=12 transient_lastDdlTime - 1304060115 + 1310382326 @@ -1099,11 +896,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart transient_lastDdlTime - 1304060114 + 1310382326 Index: ql/src/test/results/compiler/plan/input_testsequencefile.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (working copy) @@ -159,21 +159,21 @@ - CNTR_NAME_FS_300_NUM_INPUT_ROWS + CNTR_NAME_FS_295_NUM_INPUT_ROWS - CNTR_NAME_FS_300_NUM_OUTPUT_ROWS + CNTR_NAME_FS_295_NUM_OUTPUT_ROWS - CNTR_NAME_FS_300_TIME_TAKEN + CNTR_NAME_FS_295_TIME_TAKEN - CNTR_NAME_FS_300_FATAL_ERROR + CNTR_NAME_FS_295_FATAL_ERROR - FS_300 + FS_295 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_299_NUM_INPUT_ROWS + CNTR_NAME_TS_294_NUM_INPUT_ROWS - CNTR_NAME_TS_299_NUM_OUTPUT_ROWS + CNTR_NAME_TS_294_NUM_OUTPUT_ROWS - CNTR_NAME_TS_299_TIME_TAKEN + CNTR_NAME_TS_294_TIME_TAKEN - CNTR_NAME_TS_299_FATAL_ERROR + CNTR_NAME_TS_294_FATAL_ERROR - TS_299 + TS_294 @@ -643,21 +643,21 @@ - CNTR_NAME_FS_298_NUM_INPUT_ROWS + CNTR_NAME_FS_293_NUM_INPUT_ROWS - CNTR_NAME_FS_298_NUM_OUTPUT_ROWS + CNTR_NAME_FS_293_NUM_OUTPUT_ROWS - CNTR_NAME_FS_298_TIME_TAKEN + CNTR_NAME_FS_293_TIME_TAKEN - CNTR_NAME_FS_298_FATAL_ERROR + CNTR_NAME_FS_293_FATAL_ERROR - FS_298 + FS_293 @@ -732,21 +732,21 @@ - CNTR_NAME_SEL_297_NUM_INPUT_ROWS + CNTR_NAME_SEL_292_NUM_INPUT_ROWS - CNTR_NAME_SEL_297_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_292_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_297_TIME_TAKEN + CNTR_NAME_SEL_292_TIME_TAKEN - CNTR_NAME_SEL_297_FATAL_ERROR + CNTR_NAME_SEL_292_FATAL_ERROR - SEL_297 + SEL_292 @@ -806,16 +806,16 @@ - CNTR_NAME_TS_296_NUM_INPUT_ROWS + CNTR_NAME_TS_291_NUM_INPUT_ROWS - CNTR_NAME_TS_296_NUM_OUTPUT_ROWS + CNTR_NAME_TS_291_NUM_OUTPUT_ROWS - CNTR_NAME_TS_296_TIME_TAKEN + CNTR_NAME_TS_291_TIME_TAKEN - CNTR_NAME_TS_296_FATAL_ERROR + CNTR_NAME_TS_291_FATAL_ERROR @@ -830,7 +830,7 @@ - TS_296 + TS_291 Index: ql/src/test/results/compiler/plan/input_testxpath.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input_testxpath.q.xml (working copy) @@ -209,21 +209,21 @@ - CNTR_NAME_FS_308_NUM_INPUT_ROWS + CNTR_NAME_FS_303_NUM_INPUT_ROWS - CNTR_NAME_FS_308_NUM_OUTPUT_ROWS + CNTR_NAME_FS_303_NUM_OUTPUT_ROWS - CNTR_NAME_FS_308_TIME_TAKEN + CNTR_NAME_FS_303_TIME_TAKEN - CNTR_NAME_FS_308_FATAL_ERROR + CNTR_NAME_FS_303_FATAL_ERROR - FS_308 + FS_303 @@ -497,21 +497,21 @@ - CNTR_NAME_SEL_307_NUM_INPUT_ROWS + CNTR_NAME_SEL_302_NUM_INPUT_ROWS - CNTR_NAME_SEL_307_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_302_NUM_OUTPUT_ROWS - 
CNTR_NAME_SEL_307_TIME_TAKEN + CNTR_NAME_SEL_302_TIME_TAKEN - CNTR_NAME_SEL_307_FATAL_ERROR + CNTR_NAME_SEL_302_FATAL_ERROR - SEL_307 + SEL_302 @@ -584,16 +584,16 @@ - CNTR_NAME_TS_306_NUM_INPUT_ROWS + CNTR_NAME_TS_301_NUM_INPUT_ROWS - CNTR_NAME_TS_306_NUM_OUTPUT_ROWS + CNTR_NAME_TS_301_NUM_OUTPUT_ROWS - CNTR_NAME_TS_306_TIME_TAKEN + CNTR_NAME_TS_301_TIME_TAKEN - CNTR_NAME_TS_306_FATAL_ERROR + CNTR_NAME_TS_301_FATAL_ERROR @@ -611,7 +611,7 @@ - TS_306 + TS_301 Index: ql/src/test/results/compiler/plan/input_testxpath2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath2.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/input_testxpath2.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -66,11 +66,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1304060151 + 1310382339 @@ -132,11 +132,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1304060151 + 1310382339 @@ -160,328 +160,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-04-28_23-55-52_168_749214230069396820/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-04-28_23-55-52_168_749214230069396820/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2 - - - serialization.format - 1 - - - columns.types - int:int:int - - - - - - - 1 - - + + + + + file:/tmp/amarsri/hive_2011-07-11_04-05-39_617_1990531796790632873/-ext-10001 + + + 1 + + + file:/tmp/amarsri/hive_2011-07-11_04-05-39_617_1990531796790632873/-ext-10001/ + + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - CNTR_NAME_FS_315_NUM_INPUT_ROWS - - - CNTR_NAME_FS_315_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_315_TIME_TAKEN - - - CNTR_NAME_FS_315_FATAL_ERROR - - + + org.apache.hadoop.mapred.TextInputFormat - - FS_315 + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - + + + + columns + _col0,_col1,_col2 - - - - - - - - - - _col0 - - - - - - - - int - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - + + serialization.format + 1 - - - - - - - - - - _col2 - - - - - - - mstringstring - - - src_thrift - - - - - - - string - - - - - - - - - + + columns.types + int:int:int - - - - - - - - _col1 - - - - - - - lintstring - - - src_thrift - - - - - - - - - myint - - - mystring - - - underscore_int - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + 1 - - _col0 - - - - - - - lint - - - src_thrift - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - - - - CNTR_NAME_SEL_314_NUM_INPUT_ROWS + CNTR_NAME_FS_310_NUM_INPUT_ROWS - CNTR_NAME_SEL_314_NUM_OUTPUT_ROWS + CNTR_NAME_FS_310_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_314_TIME_TAKEN + CNTR_NAME_FS_310_TIME_TAKEN - CNTR_NAME_SEL_314_FATAL_ERROR + CNTR_NAME_FS_310_FATAL_ERROR - SEL_314 + FS_310 - + @@ -491,25 +242,29 @@ - - _c0 - _col0 + + + - + + + int + + - - _c1 - _col1 + + + @@ -517,12 +272,12 @@ - - _c2 - _col2 + + + @@ -536,114 +291,188 @@ - - - - + + + + _col2 + - - - - - - - lint - - - src_thrift - - - - - - - + + + mstringstring - - + + src_thrift - - - boolean + + + + + 
string + + + + + + + + + + + + + + + + + _col1 + + + - - - - - - + + + lintstring + + + src_thrift + + + + + + - - - mstringstring - - - src_thrift - - - - - + myint + + mystring + + + underscore_int + - - + + + + + + + + + + + + - - - - - + + + + + + + + + + + + + + _col0 + + + + + + + lint + + src_thrift + - + + + + + - + - + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + + + - CNTR_NAME_FIL_313_NUM_INPUT_ROWS + CNTR_NAME_SEL_309_NUM_INPUT_ROWS - CNTR_NAME_FIL_313_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_309_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_313_TIME_TAKEN + CNTR_NAME_SEL_309_TIME_TAKEN - CNTR_NAME_FIL_313_FATAL_ERROR + CNTR_NAME_SEL_309_FATAL_ERROR - FIL_313 + SEL_309 @@ -657,41 +486,41 @@ - + + + _c0 + - lint + _col0 - - src_thrift - - + - + + + _c1 + - lintstring + _col1 - - src_thrift - - + - + + + _c2 + - mstringstring + _col2 - - src_thrift - - + @@ -732,7 +561,11 @@ - + + + boolean + + @@ -792,21 +625,21 @@ - CNTR_NAME_FIL_316_NUM_INPUT_ROWS + CNTR_NAME_FIL_311_NUM_INPUT_ROWS - CNTR_NAME_FIL_316_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_311_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_316_TIME_TAKEN + CNTR_NAME_FIL_311_TIME_TAKEN - CNTR_NAME_FIL_316_FATAL_ERROR + CNTR_NAME_FIL_311_FATAL_ERROR - FIL_316 + FIL_311 @@ -846,7 +679,17 @@ - + + + lint + + + src_thrift + + + + + @@ -866,10 +709,30 @@ - + + + lintstring + + + src_thrift + + + + + - + + + mstringstring + + + src_thrift + + + + + @@ -928,16 +791,16 @@ - CNTR_NAME_TS_312_NUM_INPUT_ROWS + CNTR_NAME_TS_307_NUM_INPUT_ROWS - CNTR_NAME_TS_312_NUM_OUTPUT_ROWS + CNTR_NAME_TS_307_NUM_OUTPUT_ROWS - CNTR_NAME_TS_312_TIME_TAKEN + CNTR_NAME_TS_307_TIME_TAKEN - CNTR_NAME_TS_312_FATAL_ERROR + CNTR_NAME_TS_307_FATAL_ERROR @@ -955,7 +818,7 @@ - TS_312 + TS_307 @@ -974,7 +837,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift src_thrift @@ -986,7 +849,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift src_thrift @@ -1047,11 +910,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1304060151 + 1310382339 @@ -1113,11 +976,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src_thrift + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src_thrift transient_lastDdlTime - 1304060151 + 1310382339 Index: ql/src/test/results/compiler/plan/join1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join1.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/join1.q.xml (working copy) @@ -555,21 +555,21 @@ - CNTR_NAME_RS_325_NUM_INPUT_ROWS + CNTR_NAME_RS_319_NUM_INPUT_ROWS - CNTR_NAME_RS_325_NUM_OUTPUT_ROWS + CNTR_NAME_RS_319_NUM_OUTPUT_ROWS - CNTR_NAME_RS_325_TIME_TAKEN + CNTR_NAME_RS_319_TIME_TAKEN - CNTR_NAME_RS_325_FATAL_ERROR + CNTR_NAME_RS_319_FATAL_ERROR - RS_325 + RS_319 @@ -616,16 +616,16 @@ - CNTR_NAME_TS_322_NUM_INPUT_ROWS + CNTR_NAME_TS_316_NUM_INPUT_ROWS - CNTR_NAME_TS_322_NUM_OUTPUT_ROWS + CNTR_NAME_TS_316_NUM_OUTPUT_ROWS - CNTR_NAME_TS_322_TIME_TAKEN + CNTR_NAME_TS_316_TIME_TAKEN - CNTR_NAME_TS_322_FATAL_ERROR + CNTR_NAME_TS_316_FATAL_ERROR @@ -640,7 +640,7 @@ - TS_322 + TS_316 @@ -854,21 +854,21 @@ - CNTR_NAME_RS_324_NUM_INPUT_ROWS + 
CNTR_NAME_RS_318_NUM_INPUT_ROWS - CNTR_NAME_RS_324_NUM_OUTPUT_ROWS + CNTR_NAME_RS_318_NUM_OUTPUT_ROWS - CNTR_NAME_RS_324_TIME_TAKEN + CNTR_NAME_RS_318_TIME_TAKEN - CNTR_NAME_RS_324_FATAL_ERROR + CNTR_NAME_RS_318_FATAL_ERROR - RS_324 + RS_318 @@ -915,16 +915,16 @@ - CNTR_NAME_TS_323_NUM_INPUT_ROWS + CNTR_NAME_TS_317_NUM_INPUT_ROWS - CNTR_NAME_TS_323_NUM_OUTPUT_ROWS + CNTR_NAME_TS_317_NUM_OUTPUT_ROWS - CNTR_NAME_TS_323_TIME_TAKEN + CNTR_NAME_TS_317_TIME_TAKEN - CNTR_NAME_TS_323_FATAL_ERROR + CNTR_NAME_TS_317_FATAL_ERROR @@ -936,7 +936,7 @@ - TS_323 + TS_317 @@ -1215,21 +1215,21 @@ - CNTR_NAME_FS_328_NUM_INPUT_ROWS + CNTR_NAME_FS_322_NUM_INPUT_ROWS - CNTR_NAME_FS_328_NUM_OUTPUT_ROWS + CNTR_NAME_FS_322_NUM_OUTPUT_ROWS - CNTR_NAME_FS_328_TIME_TAKEN + CNTR_NAME_FS_322_TIME_TAKEN - CNTR_NAME_FS_328_FATAL_ERROR + CNTR_NAME_FS_322_FATAL_ERROR - FS_328 + FS_322 @@ -1335,21 +1335,21 @@ - CNTR_NAME_SEL_327_NUM_INPUT_ROWS + CNTR_NAME_SEL_321_NUM_INPUT_ROWS - CNTR_NAME_SEL_327_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_321_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_327_TIME_TAKEN + CNTR_NAME_SEL_321_TIME_TAKEN - CNTR_NAME_SEL_327_FATAL_ERROR + CNTR_NAME_SEL_321_FATAL_ERROR - SEL_327 + SEL_321 @@ -1537,21 +1537,21 @@ - CNTR_NAME_JOIN_326_NUM_INPUT_ROWS + CNTR_NAME_JOIN_320_NUM_INPUT_ROWS - CNTR_NAME_JOIN_326_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_320_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_326_TIME_TAKEN + CNTR_NAME_JOIN_320_TIME_TAKEN - CNTR_NAME_JOIN_326_FATAL_ERROR + CNTR_NAME_JOIN_320_FATAL_ERROR - JOIN_326 + JOIN_320 Index: ql/src/test/results/compiler/plan/join2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join2.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/join2.q.xml (working copy) @@ -472,21 +472,21 @@ - CNTR_NAME_RS_342_NUM_INPUT_ROWS + CNTR_NAME_RS_336_NUM_INPUT_ROWS - CNTR_NAME_RS_342_NUM_OUTPUT_ROWS + CNTR_NAME_RS_336_NUM_OUTPUT_ROWS - CNTR_NAME_RS_342_TIME_TAKEN + CNTR_NAME_RS_336_TIME_TAKEN - CNTR_NAME_RS_342_FATAL_ERROR + CNTR_NAME_RS_336_FATAL_ERROR - RS_342 + RS_336 @@ -523,21 +523,21 @@ - CNTR_NAME_TS_348_NUM_INPUT_ROWS + CNTR_NAME_TS_342_NUM_INPUT_ROWS - CNTR_NAME_TS_348_NUM_OUTPUT_ROWS + CNTR_NAME_TS_342_NUM_OUTPUT_ROWS - CNTR_NAME_TS_348_TIME_TAKEN + CNTR_NAME_TS_342_TIME_TAKEN - CNTR_NAME_TS_348_FATAL_ERROR + CNTR_NAME_TS_342_FATAL_ERROR - TS_348 + TS_342 @@ -739,21 +739,21 @@ - CNTR_NAME_RS_343_NUM_INPUT_ROWS + CNTR_NAME_RS_337_NUM_INPUT_ROWS - CNTR_NAME_RS_343_NUM_OUTPUT_ROWS + CNTR_NAME_RS_337_NUM_OUTPUT_ROWS - CNTR_NAME_RS_343_TIME_TAKEN + CNTR_NAME_RS_337_TIME_TAKEN - CNTR_NAME_RS_343_FATAL_ERROR + CNTR_NAME_RS_337_FATAL_ERROR - RS_343 + RS_337 @@ -800,16 +800,16 @@ - CNTR_NAME_TS_337_NUM_INPUT_ROWS + CNTR_NAME_TS_331_NUM_INPUT_ROWS - CNTR_NAME_TS_337_NUM_OUTPUT_ROWS + CNTR_NAME_TS_331_NUM_OUTPUT_ROWS - CNTR_NAME_TS_337_TIME_TAKEN + CNTR_NAME_TS_331_TIME_TAKEN - CNTR_NAME_TS_337_FATAL_ERROR + CNTR_NAME_TS_331_FATAL_ERROR @@ -824,7 +824,7 @@ - TS_337 + TS_331 @@ -1161,21 +1161,21 @@ - CNTR_NAME_FS_346_NUM_INPUT_ROWS + CNTR_NAME_FS_340_NUM_INPUT_ROWS - CNTR_NAME_FS_346_NUM_OUTPUT_ROWS + CNTR_NAME_FS_340_NUM_OUTPUT_ROWS - CNTR_NAME_FS_346_TIME_TAKEN + CNTR_NAME_FS_340_TIME_TAKEN - CNTR_NAME_FS_346_FATAL_ERROR + CNTR_NAME_FS_340_FATAL_ERROR - FS_346 + FS_340 @@ -1281,21 +1281,21 @@ - CNTR_NAME_SEL_345_NUM_INPUT_ROWS + CNTR_NAME_SEL_339_NUM_INPUT_ROWS - CNTR_NAME_SEL_345_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_339_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_345_TIME_TAKEN + CNTR_NAME_SEL_339_TIME_TAKEN - CNTR_NAME_SEL_345_FATAL_ERROR + 
CNTR_NAME_SEL_339_FATAL_ERROR - SEL_345 + SEL_339 @@ -1499,21 +1499,21 @@ - CNTR_NAME_JOIN_344_NUM_INPUT_ROWS + CNTR_NAME_JOIN_338_NUM_INPUT_ROWS - CNTR_NAME_JOIN_344_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_338_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_344_TIME_TAKEN + CNTR_NAME_JOIN_338_TIME_TAKEN - CNTR_NAME_JOIN_344_FATAL_ERROR + CNTR_NAME_JOIN_338_FATAL_ERROR - JOIN_344 + JOIN_338 @@ -2016,21 +2016,21 @@ - CNTR_NAME_RS_340_NUM_INPUT_ROWS + CNTR_NAME_RS_334_NUM_INPUT_ROWS - CNTR_NAME_RS_340_NUM_OUTPUT_ROWS + CNTR_NAME_RS_334_NUM_OUTPUT_ROWS - CNTR_NAME_RS_340_TIME_TAKEN + CNTR_NAME_RS_334_TIME_TAKEN - CNTR_NAME_RS_340_FATAL_ERROR + CNTR_NAME_RS_334_FATAL_ERROR - RS_340 + RS_334 @@ -2077,16 +2077,16 @@ - CNTR_NAME_TS_336_NUM_INPUT_ROWS + CNTR_NAME_TS_330_NUM_INPUT_ROWS - CNTR_NAME_TS_336_NUM_OUTPUT_ROWS + CNTR_NAME_TS_330_NUM_OUTPUT_ROWS - CNTR_NAME_TS_336_TIME_TAKEN + CNTR_NAME_TS_330_TIME_TAKEN - CNTR_NAME_TS_336_FATAL_ERROR + CNTR_NAME_TS_330_FATAL_ERROR @@ -2098,7 +2098,7 @@ - TS_336 + TS_330 @@ -2308,21 +2308,21 @@ - CNTR_NAME_RS_339_NUM_INPUT_ROWS + CNTR_NAME_RS_333_NUM_INPUT_ROWS - CNTR_NAME_RS_339_NUM_OUTPUT_ROWS + CNTR_NAME_RS_333_NUM_OUTPUT_ROWS - CNTR_NAME_RS_339_TIME_TAKEN + CNTR_NAME_RS_333_TIME_TAKEN - CNTR_NAME_RS_339_FATAL_ERROR + CNTR_NAME_RS_333_FATAL_ERROR - RS_339 + RS_333 @@ -2369,16 +2369,16 @@ - CNTR_NAME_TS_338_NUM_INPUT_ROWS + CNTR_NAME_TS_332_NUM_INPUT_ROWS - CNTR_NAME_TS_338_NUM_OUTPUT_ROWS + CNTR_NAME_TS_332_NUM_OUTPUT_ROWS - CNTR_NAME_TS_338_TIME_TAKEN + CNTR_NAME_TS_332_TIME_TAKEN - CNTR_NAME_TS_338_FATAL_ERROR + CNTR_NAME_TS_332_FATAL_ERROR @@ -2390,7 +2390,7 @@ - TS_338 + TS_332 @@ -2653,21 +2653,21 @@ - CNTR_NAME_FS_347_NUM_INPUT_ROWS + CNTR_NAME_FS_341_NUM_INPUT_ROWS - CNTR_NAME_FS_347_NUM_OUTPUT_ROWS + CNTR_NAME_FS_341_NUM_OUTPUT_ROWS - CNTR_NAME_FS_347_TIME_TAKEN + CNTR_NAME_FS_341_TIME_TAKEN - CNTR_NAME_FS_347_FATAL_ERROR + CNTR_NAME_FS_341_FATAL_ERROR - FS_347 + FS_341 @@ -2824,21 +2824,21 @@ - CNTR_NAME_JOIN_341_NUM_INPUT_ROWS + CNTR_NAME_JOIN_335_NUM_INPUT_ROWS - CNTR_NAME_JOIN_341_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_335_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_341_TIME_TAKEN + CNTR_NAME_JOIN_335_TIME_TAKEN - CNTR_NAME_JOIN_341_FATAL_ERROR + CNTR_NAME_JOIN_335_FATAL_ERROR - JOIN_341 + JOIN_335 Index: ql/src/test/results/compiler/plan/join3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join3.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/join3.q.xml (working copy) @@ -664,21 +664,21 @@ - CNTR_NAME_RS_366_NUM_INPUT_ROWS + CNTR_NAME_RS_360_NUM_INPUT_ROWS - CNTR_NAME_RS_366_NUM_OUTPUT_ROWS + CNTR_NAME_RS_360_NUM_OUTPUT_ROWS - CNTR_NAME_RS_366_TIME_TAKEN + CNTR_NAME_RS_360_TIME_TAKEN - CNTR_NAME_RS_366_FATAL_ERROR + CNTR_NAME_RS_360_FATAL_ERROR - RS_366 + RS_360 @@ -711,16 +711,16 @@ - CNTR_NAME_TS_362_NUM_INPUT_ROWS + CNTR_NAME_TS_356_NUM_INPUT_ROWS - CNTR_NAME_TS_362_NUM_OUTPUT_ROWS + CNTR_NAME_TS_356_NUM_OUTPUT_ROWS - CNTR_NAME_TS_362_TIME_TAKEN + CNTR_NAME_TS_356_TIME_TAKEN - CNTR_NAME_TS_362_FATAL_ERROR + CNTR_NAME_TS_356_FATAL_ERROR @@ -732,7 +732,7 @@ - TS_362 + TS_356 @@ -949,21 +949,21 @@ - CNTR_NAME_RS_367_NUM_INPUT_ROWS + CNTR_NAME_RS_361_NUM_INPUT_ROWS - CNTR_NAME_RS_367_NUM_OUTPUT_ROWS + CNTR_NAME_RS_361_NUM_OUTPUT_ROWS - CNTR_NAME_RS_367_TIME_TAKEN + CNTR_NAME_RS_361_TIME_TAKEN - CNTR_NAME_RS_367_FATAL_ERROR + CNTR_NAME_RS_361_FATAL_ERROR - RS_367 + RS_361 @@ -1010,16 +1010,16 @@ - CNTR_NAME_TS_363_NUM_INPUT_ROWS + CNTR_NAME_TS_357_NUM_INPUT_ROWS - CNTR_NAME_TS_363_NUM_OUTPUT_ROWS + 
CNTR_NAME_TS_357_NUM_OUTPUT_ROWS - CNTR_NAME_TS_363_TIME_TAKEN + CNTR_NAME_TS_357_TIME_TAKEN - CNTR_NAME_TS_363_FATAL_ERROR + CNTR_NAME_TS_357_FATAL_ERROR @@ -1034,7 +1034,7 @@ - TS_363 + TS_357 @@ -1244,21 +1244,21 @@ - CNTR_NAME_RS_365_NUM_INPUT_ROWS + CNTR_NAME_RS_359_NUM_INPUT_ROWS - CNTR_NAME_RS_365_NUM_OUTPUT_ROWS + CNTR_NAME_RS_359_NUM_OUTPUT_ROWS - CNTR_NAME_RS_365_TIME_TAKEN + CNTR_NAME_RS_359_TIME_TAKEN - CNTR_NAME_RS_365_FATAL_ERROR + CNTR_NAME_RS_359_FATAL_ERROR - RS_365 + RS_359 @@ -1305,16 +1305,16 @@ - CNTR_NAME_TS_364_NUM_INPUT_ROWS + CNTR_NAME_TS_358_NUM_INPUT_ROWS - CNTR_NAME_TS_364_NUM_OUTPUT_ROWS + CNTR_NAME_TS_358_NUM_OUTPUT_ROWS - CNTR_NAME_TS_364_TIME_TAKEN + CNTR_NAME_TS_358_TIME_TAKEN - CNTR_NAME_TS_364_FATAL_ERROR + CNTR_NAME_TS_358_FATAL_ERROR @@ -1326,7 +1326,7 @@ - TS_364 + TS_358 @@ -1608,21 +1608,21 @@ - CNTR_NAME_FS_370_NUM_INPUT_ROWS + CNTR_NAME_FS_364_NUM_INPUT_ROWS - CNTR_NAME_FS_370_NUM_OUTPUT_ROWS + CNTR_NAME_FS_364_NUM_OUTPUT_ROWS - CNTR_NAME_FS_370_TIME_TAKEN + CNTR_NAME_FS_364_TIME_TAKEN - CNTR_NAME_FS_370_FATAL_ERROR + CNTR_NAME_FS_364_FATAL_ERROR - FS_370 + FS_364 @@ -1728,21 +1728,21 @@ - CNTR_NAME_SEL_369_NUM_INPUT_ROWS + CNTR_NAME_SEL_363_NUM_INPUT_ROWS - CNTR_NAME_SEL_369_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_363_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_369_TIME_TAKEN + CNTR_NAME_SEL_363_TIME_TAKEN - CNTR_NAME_SEL_369_FATAL_ERROR + CNTR_NAME_SEL_363_FATAL_ERROR - SEL_369 + SEL_363 @@ -1964,21 +1964,21 @@ - CNTR_NAME_JOIN_368_NUM_INPUT_ROWS + CNTR_NAME_JOIN_362_NUM_INPUT_ROWS - CNTR_NAME_JOIN_368_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_362_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_368_TIME_TAKEN + CNTR_NAME_JOIN_362_TIME_TAKEN - CNTR_NAME_JOIN_368_FATAL_ERROR + CNTR_NAME_JOIN_362_FATAL_ERROR - JOIN_368 + JOIN_362 Index: ql/src/test/results/compiler/plan/join4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join4.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/join4.q.xml (working copy) @@ -284,249 +284,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_386_NUM_INPUT_ROWS - - - CNTR_NAME_RS_386_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_386_TIME_TAKEN - - - CNTR_NAME_RS_386_FATAL_ERROR - - - - - RS_386 - - - - - - - - - - - - - - - - VALUE._col0 - - - a - - - - - - - - - - VALUE._col1 - - - a - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src1 - - + + + string + + - _col0 - + VALUE._col0 + - key + _col0 - - src1 - @@ -535,18 +320,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat 
+ + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -556,31 +390,73 @@ + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_385_NUM_INPUT_ROWS + CNTR_NAME_RS_380_NUM_INPUT_ROWS - CNTR_NAME_SEL_385_NUM_OUTPUT_ROWS + CNTR_NAME_RS_380_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_385_TIME_TAKEN + CNTR_NAME_RS_380_TIME_TAKEN - CNTR_NAME_SEL_385_FATAL_ERROR + CNTR_NAME_RS_380_FATAL_ERROR - SEL_385 + RS_380 - + @@ -591,8 +467,11 @@ - _col0 + VALUE._col0 + + a + @@ -601,8 +480,11 @@ - _col1 + VALUE._col1 + + a + @@ -616,124 +498,80 @@ - - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - int - - - - - 10 - - - - - - - - - - - - boolean - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src1 - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_384_NUM_INPUT_ROWS + CNTR_NAME_SEL_379_NUM_INPUT_ROWS - CNTR_NAME_FIL_384_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_379_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_384_TIME_TAKEN + CNTR_NAME_SEL_379_TIME_TAKEN - CNTR_NAME_FIL_384_FATAL_ERROR + CNTR_NAME_SEL_379_FATAL_ERROR - FIL_384 + SEL_379 @@ -747,26 +585,20 @@ - + - key + _col0 - - src1 - - + - value + _col1 - - src1 - @@ -806,7 +638,11 @@ - + + + int + + 10 @@ -819,7 +655,11 @@ - + + + boolean + + @@ -875,21 +715,21 @@ - CNTR_NAME_FIL_392_NUM_INPUT_ROWS + CNTR_NAME_FIL_386_NUM_INPUT_ROWS - CNTR_NAME_FIL_392_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_386_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_392_TIME_TAKEN + CNTR_NAME_FIL_386_TIME_TAKEN - CNTR_NAME_FIL_392_FATAL_ERROR + CNTR_NAME_FIL_386_FATAL_ERROR - FIL_392 + FIL_386 @@ -903,10 +743,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -965,16 +825,16 @@ - CNTR_NAME_TS_383_NUM_INPUT_ROWS + CNTR_NAME_TS_377_NUM_INPUT_ROWS - CNTR_NAME_TS_383_NUM_OUTPUT_ROWS + CNTR_NAME_TS_377_NUM_OUTPUT_ROWS - CNTR_NAME_TS_383_TIME_TAKEN + CNTR_NAME_TS_377_TIME_TAKEN - CNTR_NAME_TS_383_FATAL_ERROR + CNTR_NAME_TS_377_FATAL_ERROR @@ -989,7 +849,7 @@ - TS_383 + TS_377 @@ -1006,252 +866,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 1 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_387_NUM_INPUT_ROWS - - - CNTR_NAME_RS_387_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_387_TIME_TAKEN - - - CNTR_NAME_RS_387_FATAL_ERROR - - - - - RS_387 - - - - - - - - - - - - - - - - VALUE._col0 - - - b - - - - - - - - - - VALUE._col1 - - - 
b - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src2 - - _col0 - + VALUE._col0 + - key + _col0 - - src2 - @@ -1260,18 +902,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -1281,31 +972,76 @@ + + + + + 1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_382_NUM_INPUT_ROWS + CNTR_NAME_RS_381_NUM_INPUT_ROWS - CNTR_NAME_SEL_382_NUM_OUTPUT_ROWS + CNTR_NAME_RS_381_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_382_TIME_TAKEN + CNTR_NAME_RS_381_TIME_TAKEN - CNTR_NAME_SEL_382_FATAL_ERROR + CNTR_NAME_RS_381_FATAL_ERROR - SEL_382 + RS_381 - + @@ -1316,8 +1052,11 @@ - _col0 + VALUE._col0 + + b + @@ -1326,8 +1065,11 @@ - _col1 + VALUE._col1 + + b + @@ -1341,121 +1083,85 @@ - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 15 - - - - - - - - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src2 - + + + _col0 + + + key + + + src2 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_381_NUM_INPUT_ROWS + CNTR_NAME_SEL_376_NUM_INPUT_ROWS - CNTR_NAME_FIL_381_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_376_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_381_TIME_TAKEN + CNTR_NAME_SEL_376_TIME_TAKEN - CNTR_NAME_FIL_381_FATAL_ERROR + CNTR_NAME_SEL_376_FATAL_ERROR - FIL_381 + SEL_376 - + @@ -1464,26 +1170,20 @@ - + - key + _col0 - - src2 - - + - value + _col1 - - src2 - @@ -1592,21 +1292,21 @@ - CNTR_NAME_FIL_393_NUM_INPUT_ROWS + CNTR_NAME_FIL_387_NUM_INPUT_ROWS - CNTR_NAME_FIL_393_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_387_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_393_TIME_TAKEN + CNTR_NAME_FIL_387_TIME_TAKEN - CNTR_NAME_FIL_393_FATAL_ERROR + CNTR_NAME_FIL_387_FATAL_ERROR - FIL_393 + FIL_387 @@ -1620,10 +1320,30 @@ - + + + key + + + src2 + + + + + - + + + value + + + src2 + + + + + @@ -1678,16 +1398,16 @@ - CNTR_NAME_TS_380_NUM_INPUT_ROWS + CNTR_NAME_TS_374_NUM_INPUT_ROWS - CNTR_NAME_TS_380_NUM_OUTPUT_ROWS + CNTR_NAME_TS_374_NUM_OUTPUT_ROWS - CNTR_NAME_TS_380_TIME_TAKEN + CNTR_NAME_TS_374_TIME_TAKEN - CNTR_NAME_TS_380_FATAL_ERROR + CNTR_NAME_TS_374_FATAL_ERROR @@ -1702,7 +1422,7 @@ - TS_380 + TS_374 @@ -1943,21 +1663,21 @@ - CNTR_NAME_FS_391_NUM_INPUT_ROWS + CNTR_NAME_FS_385_NUM_INPUT_ROWS - CNTR_NAME_FS_391_NUM_OUTPUT_ROWS + CNTR_NAME_FS_385_NUM_OUTPUT_ROWS - CNTR_NAME_FS_391_TIME_TAKEN + CNTR_NAME_FS_385_TIME_TAKEN - CNTR_NAME_FS_391_FATAL_ERROR + CNTR_NAME_FS_385_FATAL_ERROR - FS_391 + FS_385 @@ -2117,21 +1837,21 @@ - CNTR_NAME_SEL_390_NUM_INPUT_ROWS + CNTR_NAME_SEL_384_NUM_INPUT_ROWS - CNTR_NAME_SEL_390_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_384_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_390_TIME_TAKEN + CNTR_NAME_SEL_384_TIME_TAKEN - CNTR_NAME_SEL_390_FATAL_ERROR + CNTR_NAME_SEL_384_FATAL_ERROR - SEL_390 + SEL_384 @@ -2315,21 +2035,21 @@ - CNTR_NAME_SEL_389_NUM_INPUT_ROWS + CNTR_NAME_SEL_383_NUM_INPUT_ROWS - CNTR_NAME_SEL_389_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_383_NUM_OUTPUT_ROWS - 
CNTR_NAME_SEL_389_TIME_TAKEN + CNTR_NAME_SEL_383_TIME_TAKEN - CNTR_NAME_SEL_389_FATAL_ERROR + CNTR_NAME_SEL_383_FATAL_ERROR - SEL_389 + SEL_383 @@ -2555,21 +2275,21 @@ - CNTR_NAME_JOIN_388_NUM_INPUT_ROWS + CNTR_NAME_JOIN_382_NUM_INPUT_ROWS - CNTR_NAME_JOIN_388_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_382_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_388_TIME_TAKEN + CNTR_NAME_JOIN_382_TIME_TAKEN - CNTR_NAME_JOIN_388_FATAL_ERROR + CNTR_NAME_JOIN_382_FATAL_ERROR - JOIN_388 + JOIN_382 Index: ql/src/test/results/compiler/plan/join5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join5.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/join5.q.xml (working copy) @@ -284,249 +284,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_414_NUM_INPUT_ROWS - - - CNTR_NAME_RS_414_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_414_TIME_TAKEN - - - CNTR_NAME_RS_414_FATAL_ERROR - - - - - RS_414 - - - - - - - - - - - - - - - - VALUE._col0 - - - a - - - - - - - - - - VALUE._col1 - - - a - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src1 - - + + + string + + - _col0 - + VALUE._col0 + - key + _col0 - - src1 - @@ -535,18 +320,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -556,31 +390,73 @@ + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_413_NUM_INPUT_ROWS + CNTR_NAME_RS_406_NUM_INPUT_ROWS - CNTR_NAME_SEL_413_NUM_OUTPUT_ROWS + CNTR_NAME_RS_406_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_413_TIME_TAKEN + CNTR_NAME_RS_406_TIME_TAKEN - CNTR_NAME_SEL_413_FATAL_ERROR + CNTR_NAME_RS_406_FATAL_ERROR - SEL_413 + RS_406 - + @@ -591,8 +467,11 @@ - _col0 + VALUE._col0 + + a + @@ -601,8 +480,11 @@ - _col1 + VALUE._col1 + + a + @@ -616,124 +498,80 @@ - - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - int - - - - - 10 - - - - - - - - - - - - boolean - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src1 - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - 
CNTR_NAME_FIL_412_NUM_INPUT_ROWS + CNTR_NAME_SEL_405_NUM_INPUT_ROWS - CNTR_NAME_FIL_412_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_405_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_412_TIME_TAKEN + CNTR_NAME_SEL_405_TIME_TAKEN - CNTR_NAME_FIL_412_FATAL_ERROR + CNTR_NAME_SEL_405_FATAL_ERROR - FIL_412 + SEL_405 @@ -747,26 +585,20 @@ - + - key + _col0 - - src1 - - + - value + _col1 - - src1 - @@ -806,7 +638,11 @@ - + + + int + + 10 @@ -819,7 +655,11 @@ - + + + boolean + + @@ -875,21 +715,21 @@ - CNTR_NAME_FIL_420_NUM_INPUT_ROWS + CNTR_NAME_FIL_412_NUM_INPUT_ROWS - CNTR_NAME_FIL_420_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_412_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_420_TIME_TAKEN + CNTR_NAME_FIL_412_TIME_TAKEN - CNTR_NAME_FIL_420_FATAL_ERROR + CNTR_NAME_FIL_412_FATAL_ERROR - FIL_420 + FIL_412 @@ -903,10 +743,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -965,16 +825,16 @@ - CNTR_NAME_TS_411_NUM_INPUT_ROWS + CNTR_NAME_TS_403_NUM_INPUT_ROWS - CNTR_NAME_TS_411_NUM_OUTPUT_ROWS + CNTR_NAME_TS_403_NUM_OUTPUT_ROWS - CNTR_NAME_TS_411_TIME_TAKEN + CNTR_NAME_TS_403_TIME_TAKEN - CNTR_NAME_TS_411_FATAL_ERROR + CNTR_NAME_TS_403_FATAL_ERROR @@ -989,7 +849,7 @@ - TS_411 + TS_403 @@ -1006,252 +866,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 1 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_415_NUM_INPUT_ROWS - - - CNTR_NAME_RS_415_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_415_TIME_TAKEN - - - CNTR_NAME_RS_415_FATAL_ERROR - - - - - RS_415 - - - - - - - - - - - - - - - - VALUE._col0 - - - b - - - - - - - - - - VALUE._col1 - - - b - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src2 - - _col0 - + VALUE._col0 + - key + _col0 - - src2 - @@ -1260,18 +902,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -1281,31 +972,76 @@ + + + + + 1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_410_NUM_INPUT_ROWS + CNTR_NAME_RS_407_NUM_INPUT_ROWS - CNTR_NAME_SEL_410_NUM_OUTPUT_ROWS + CNTR_NAME_RS_407_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_410_TIME_TAKEN + CNTR_NAME_RS_407_TIME_TAKEN - CNTR_NAME_SEL_410_FATAL_ERROR + CNTR_NAME_RS_407_FATAL_ERROR - SEL_410 + RS_407 - + @@ -1316,8 +1052,11 @@ - _col0 + VALUE._col0 + + b + @@ -1326,8 +1065,11 @@ - 
_col1 + VALUE._col1 + + b + @@ -1341,121 +1083,85 @@ - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 15 - - - - - - - - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src2 - + + + _col0 + + + key + + + src2 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_409_NUM_INPUT_ROWS + CNTR_NAME_SEL_402_NUM_INPUT_ROWS - CNTR_NAME_FIL_409_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_402_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_409_TIME_TAKEN + CNTR_NAME_SEL_402_TIME_TAKEN - CNTR_NAME_FIL_409_FATAL_ERROR + CNTR_NAME_SEL_402_FATAL_ERROR - FIL_409 + SEL_402 - + @@ -1464,26 +1170,20 @@ - + - key + _col0 - - src2 - - + - value + _col1 - - src2 - @@ -1592,21 +1292,21 @@ - CNTR_NAME_FIL_421_NUM_INPUT_ROWS + CNTR_NAME_FIL_413_NUM_INPUT_ROWS - CNTR_NAME_FIL_421_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_413_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_421_TIME_TAKEN + CNTR_NAME_FIL_413_TIME_TAKEN - CNTR_NAME_FIL_421_FATAL_ERROR + CNTR_NAME_FIL_413_FATAL_ERROR - FIL_421 + FIL_413 @@ -1620,10 +1320,30 @@ - + + + key + + + src2 + + + + + - + + + value + + + src2 + + + + + @@ -1678,16 +1398,16 @@ - CNTR_NAME_TS_408_NUM_INPUT_ROWS + CNTR_NAME_TS_400_NUM_INPUT_ROWS - CNTR_NAME_TS_408_NUM_OUTPUT_ROWS + CNTR_NAME_TS_400_NUM_OUTPUT_ROWS - CNTR_NAME_TS_408_TIME_TAKEN + CNTR_NAME_TS_400_TIME_TAKEN - CNTR_NAME_TS_408_FATAL_ERROR + CNTR_NAME_TS_400_FATAL_ERROR @@ -1702,7 +1422,7 @@ - TS_408 + TS_400 @@ -1943,21 +1663,21 @@ - CNTR_NAME_FS_419_NUM_INPUT_ROWS + CNTR_NAME_FS_411_NUM_INPUT_ROWS - CNTR_NAME_FS_419_NUM_OUTPUT_ROWS + CNTR_NAME_FS_411_NUM_OUTPUT_ROWS - CNTR_NAME_FS_419_TIME_TAKEN + CNTR_NAME_FS_411_TIME_TAKEN - CNTR_NAME_FS_419_FATAL_ERROR + CNTR_NAME_FS_411_FATAL_ERROR - FS_419 + FS_411 @@ -2117,21 +1837,21 @@ - CNTR_NAME_SEL_418_NUM_INPUT_ROWS + CNTR_NAME_SEL_410_NUM_INPUT_ROWS - CNTR_NAME_SEL_418_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_410_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_418_TIME_TAKEN + CNTR_NAME_SEL_410_TIME_TAKEN - CNTR_NAME_SEL_418_FATAL_ERROR + CNTR_NAME_SEL_410_FATAL_ERROR - SEL_418 + SEL_410 @@ -2315,21 +2035,21 @@ - CNTR_NAME_SEL_417_NUM_INPUT_ROWS + CNTR_NAME_SEL_409_NUM_INPUT_ROWS - CNTR_NAME_SEL_417_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_409_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_417_TIME_TAKEN + CNTR_NAME_SEL_409_TIME_TAKEN - CNTR_NAME_SEL_417_FATAL_ERROR + CNTR_NAME_SEL_409_FATAL_ERROR - SEL_417 + SEL_409 @@ -2555,21 +2275,21 @@ - CNTR_NAME_JOIN_416_NUM_INPUT_ROWS + CNTR_NAME_JOIN_408_NUM_INPUT_ROWS - CNTR_NAME_JOIN_416_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_408_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_416_TIME_TAKEN + CNTR_NAME_JOIN_408_TIME_TAKEN - CNTR_NAME_JOIN_416_FATAL_ERROR + CNTR_NAME_JOIN_408_FATAL_ERROR - JOIN_416 + JOIN_408 Index: ql/src/test/results/compiler/plan/join6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join6.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/join6.q.xml (working copy) @@ -284,249 +284,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - 
- - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_442_NUM_INPUT_ROWS - - - CNTR_NAME_RS_442_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_442_TIME_TAKEN - - - CNTR_NAME_RS_442_FATAL_ERROR - - - - - RS_442 - - - - - - - - - - - - - - - - VALUE._col0 - - - a - - - - - - - - - - VALUE._col1 - - - a - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src1 - - + + + string + + - _col0 - + VALUE._col0 + - key + _col0 - - src1 - @@ -535,18 +320,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -556,31 +390,73 @@ + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_441_NUM_INPUT_ROWS + CNTR_NAME_RS_432_NUM_INPUT_ROWS - CNTR_NAME_SEL_441_NUM_OUTPUT_ROWS + CNTR_NAME_RS_432_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_441_TIME_TAKEN + CNTR_NAME_RS_432_TIME_TAKEN - CNTR_NAME_SEL_441_FATAL_ERROR + CNTR_NAME_RS_432_FATAL_ERROR - SEL_441 + RS_432 - + @@ -591,8 +467,11 @@ - _col0 + VALUE._col0 + + a + @@ -601,8 +480,11 @@ - _col1 + VALUE._col1 + + a + @@ -616,124 +498,80 @@ - - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - int - - - - - 10 - - - - - - - - - - - - boolean - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src1 - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_440_NUM_INPUT_ROWS + CNTR_NAME_SEL_431_NUM_INPUT_ROWS - CNTR_NAME_FIL_440_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_431_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_440_TIME_TAKEN + CNTR_NAME_SEL_431_TIME_TAKEN - CNTR_NAME_FIL_440_FATAL_ERROR + CNTR_NAME_SEL_431_FATAL_ERROR - FIL_440 + SEL_431 @@ -747,26 +585,20 @@ - + - key + _col0 - - src1 - - + - value + _col1 - - src1 - @@ -806,7 +638,11 @@ - + + + int + + 10 @@ -819,7 +655,11 @@ - + + + boolean + + @@ -875,21 +715,21 @@ - CNTR_NAME_FIL_448_NUM_INPUT_ROWS + CNTR_NAME_FIL_438_NUM_INPUT_ROWS - CNTR_NAME_FIL_448_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_438_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_448_TIME_TAKEN + CNTR_NAME_FIL_438_TIME_TAKEN - CNTR_NAME_FIL_448_FATAL_ERROR + CNTR_NAME_FIL_438_FATAL_ERROR - FIL_448 + FIL_438 @@ -903,10 +743,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -965,16 +825,16 @@ - CNTR_NAME_TS_439_NUM_INPUT_ROWS + CNTR_NAME_TS_429_NUM_INPUT_ROWS - CNTR_NAME_TS_439_NUM_OUTPUT_ROWS + CNTR_NAME_TS_429_NUM_OUTPUT_ROWS - CNTR_NAME_TS_439_TIME_TAKEN + CNTR_NAME_TS_429_TIME_TAKEN - CNTR_NAME_TS_439_FATAL_ERROR + CNTR_NAME_TS_429_FATAL_ERROR @@ -989,7 +849,7 @@ - TS_439 + TS_429 @@ -1006,252 +866,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - 
- - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 1 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_443_NUM_INPUT_ROWS - - - CNTR_NAME_RS_443_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_443_TIME_TAKEN - - - CNTR_NAME_RS_443_FATAL_ERROR - - - - - RS_443 - - - - - - - - - - - - - - - - VALUE._col0 - - - b - - - - - - - - - - VALUE._col1 - - - b - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src2 - - _col0 - + VALUE._col0 + - key + _col0 - - src2 - @@ -1260,18 +902,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -1281,31 +972,76 @@ + + + + + 1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_438_NUM_INPUT_ROWS + CNTR_NAME_RS_433_NUM_INPUT_ROWS - CNTR_NAME_SEL_438_NUM_OUTPUT_ROWS + CNTR_NAME_RS_433_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_438_TIME_TAKEN + CNTR_NAME_RS_433_TIME_TAKEN - CNTR_NAME_SEL_438_FATAL_ERROR + CNTR_NAME_RS_433_FATAL_ERROR - SEL_438 + RS_433 - + @@ -1316,8 +1052,11 @@ - _col0 + VALUE._col0 + + b + @@ -1326,8 +1065,11 @@ - _col1 + VALUE._col1 + + b + @@ -1341,121 +1083,85 @@ - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 15 - - - - - - - - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src2 - + + + _col0 + + + key + + + src2 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_437_NUM_INPUT_ROWS + CNTR_NAME_SEL_428_NUM_INPUT_ROWS - CNTR_NAME_FIL_437_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_428_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_437_TIME_TAKEN + CNTR_NAME_SEL_428_TIME_TAKEN - CNTR_NAME_FIL_437_FATAL_ERROR + CNTR_NAME_SEL_428_FATAL_ERROR - FIL_437 + SEL_428 - + @@ -1464,26 +1170,20 @@ - + - key + _col0 - - src2 - - + - value + _col1 - - src2 - @@ -1592,21 +1292,21 @@ - CNTR_NAME_FIL_449_NUM_INPUT_ROWS + CNTR_NAME_FIL_439_NUM_INPUT_ROWS - CNTR_NAME_FIL_449_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_439_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_449_TIME_TAKEN + CNTR_NAME_FIL_439_TIME_TAKEN - CNTR_NAME_FIL_449_FATAL_ERROR + CNTR_NAME_FIL_439_FATAL_ERROR - FIL_449 + FIL_439 @@ -1620,10 +1320,30 @@ - + + + key + + + src2 + + + + + - + + + value + + + src2 + + + + + @@ -1678,16 +1398,16 @@ - CNTR_NAME_TS_436_NUM_INPUT_ROWS + CNTR_NAME_TS_426_NUM_INPUT_ROWS - CNTR_NAME_TS_436_NUM_OUTPUT_ROWS + 
CNTR_NAME_TS_426_NUM_OUTPUT_ROWS - CNTR_NAME_TS_436_TIME_TAKEN + CNTR_NAME_TS_426_TIME_TAKEN - CNTR_NAME_TS_436_FATAL_ERROR + CNTR_NAME_TS_426_FATAL_ERROR @@ -1702,7 +1422,7 @@ - TS_436 + TS_426 @@ -1943,21 +1663,21 @@ - CNTR_NAME_FS_447_NUM_INPUT_ROWS + CNTR_NAME_FS_437_NUM_INPUT_ROWS - CNTR_NAME_FS_447_NUM_OUTPUT_ROWS + CNTR_NAME_FS_437_NUM_OUTPUT_ROWS - CNTR_NAME_FS_447_TIME_TAKEN + CNTR_NAME_FS_437_TIME_TAKEN - CNTR_NAME_FS_447_FATAL_ERROR + CNTR_NAME_FS_437_FATAL_ERROR - FS_447 + FS_437 @@ -2117,21 +1837,21 @@ - CNTR_NAME_SEL_446_NUM_INPUT_ROWS + CNTR_NAME_SEL_436_NUM_INPUT_ROWS - CNTR_NAME_SEL_446_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_436_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_446_TIME_TAKEN + CNTR_NAME_SEL_436_TIME_TAKEN - CNTR_NAME_SEL_446_FATAL_ERROR + CNTR_NAME_SEL_436_FATAL_ERROR - SEL_446 + SEL_436 @@ -2315,21 +2035,21 @@ - CNTR_NAME_SEL_445_NUM_INPUT_ROWS + CNTR_NAME_SEL_435_NUM_INPUT_ROWS - CNTR_NAME_SEL_445_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_435_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_445_TIME_TAKEN + CNTR_NAME_SEL_435_TIME_TAKEN - CNTR_NAME_SEL_445_FATAL_ERROR + CNTR_NAME_SEL_435_FATAL_ERROR - SEL_445 + SEL_435 @@ -2555,21 +2275,21 @@ - CNTR_NAME_JOIN_444_NUM_INPUT_ROWS + CNTR_NAME_JOIN_434_NUM_INPUT_ROWS - CNTR_NAME_JOIN_444_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_434_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_444_TIME_TAKEN + CNTR_NAME_JOIN_434_TIME_TAKEN - CNTR_NAME_JOIN_444_FATAL_ERROR + CNTR_NAME_JOIN_434_FATAL_ERROR - JOIN_444 + JOIN_434 Index: ql/src/test/results/compiler/plan/join7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join7.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/join7.q.xml (working copy) @@ -416,249 +416,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_473_NUM_INPUT_ROWS - - - CNTR_NAME_RS_473_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_473_TIME_TAKEN - - - CNTR_NAME_RS_473_FATAL_ERROR - - - - - RS_473 - - - - - - - - - - - - - - - - VALUE._col0 - - - a - - - - - - - - - - VALUE._col1 - - - a - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src1 - - + + + string + + - _col0 - + VALUE._col0 + - key + _col0 - - src1 - @@ -667,18 +452,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -688,31 +522,73 @@ + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + 
org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_472_NUM_INPUT_ROWS + CNTR_NAME_RS_461_NUM_INPUT_ROWS - CNTR_NAME_SEL_472_NUM_OUTPUT_ROWS + CNTR_NAME_RS_461_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_472_TIME_TAKEN + CNTR_NAME_RS_461_TIME_TAKEN - CNTR_NAME_SEL_472_FATAL_ERROR + CNTR_NAME_RS_461_FATAL_ERROR - SEL_472 + RS_461 - + @@ -723,8 +599,11 @@ - _col0 + VALUE._col0 + + a + @@ -733,8 +612,11 @@ - _col1 + VALUE._col1 + + a + @@ -748,124 +630,80 @@ - - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - int - - - - - 10 - - - - - - - - - - - - boolean - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src1 - + + + _col0 + + + key + + + src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_471_NUM_INPUT_ROWS + CNTR_NAME_SEL_460_NUM_INPUT_ROWS - CNTR_NAME_FIL_471_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_460_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_471_TIME_TAKEN + CNTR_NAME_SEL_460_TIME_TAKEN - CNTR_NAME_FIL_471_FATAL_ERROR + CNTR_NAME_SEL_460_FATAL_ERROR - FIL_471 + SEL_460 @@ -879,26 +717,20 @@ - + - key + _col0 - - src1 - - + - value + _col1 - - src1 - @@ -938,7 +770,11 @@ - + + + int + + 10 @@ -951,7 +787,11 @@ - + + + boolean + + @@ -1007,21 +847,21 @@ - CNTR_NAME_FIL_480_NUM_INPUT_ROWS + CNTR_NAME_FIL_468_NUM_INPUT_ROWS - CNTR_NAME_FIL_480_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_468_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_480_TIME_TAKEN + CNTR_NAME_FIL_468_TIME_TAKEN - CNTR_NAME_FIL_480_FATAL_ERROR + CNTR_NAME_FIL_468_FATAL_ERROR - FIL_480 + FIL_468 @@ -1035,10 +875,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -1097,16 +957,16 @@ - CNTR_NAME_TS_470_NUM_INPUT_ROWS + CNTR_NAME_TS_458_NUM_INPUT_ROWS - CNTR_NAME_TS_470_NUM_OUTPUT_ROWS + CNTR_NAME_TS_458_NUM_OUTPUT_ROWS - CNTR_NAME_TS_470_TIME_TAKEN + CNTR_NAME_TS_458_TIME_TAKEN - CNTR_NAME_TS_470_FATAL_ERROR + CNTR_NAME_TS_458_FATAL_ERROR @@ -1121,7 +981,7 @@ - TS_470 + TS_458 @@ -1138,252 +998,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 1 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_474_NUM_INPUT_ROWS - - - CNTR_NAME_RS_474_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_474_TIME_TAKEN - - - CNTR_NAME_RS_474_FATAL_ERROR - - - - - RS_474 - - - - - - - - - - - - - - - - VALUE._col0 - - - b - - - - - - - - - - VALUE._col1 - - - b - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src2 - - _col0 - + VALUE._col0 + - key + _col0 - - src2 - @@ -1392,18 +1034,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + 
org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -1413,31 +1104,76 @@ + + + + + 1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_466_NUM_INPUT_ROWS + CNTR_NAME_RS_462_NUM_INPUT_ROWS - CNTR_NAME_SEL_466_NUM_OUTPUT_ROWS + CNTR_NAME_RS_462_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_466_TIME_TAKEN + CNTR_NAME_RS_462_TIME_TAKEN - CNTR_NAME_SEL_466_FATAL_ERROR + CNTR_NAME_RS_462_FATAL_ERROR - SEL_466 + RS_462 - + @@ -1448,8 +1184,11 @@ - _col0 + VALUE._col0 + + b + @@ -1458,8 +1197,11 @@ - _col1 + VALUE._col1 + + b + @@ -1473,121 +1215,85 @@ - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 15 - - - - - - - - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src2 - + + + _col0 + + + key + + + src2 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_465_NUM_INPUT_ROWS + CNTR_NAME_SEL_454_NUM_INPUT_ROWS - CNTR_NAME_FIL_465_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_454_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_465_TIME_TAKEN + CNTR_NAME_SEL_454_TIME_TAKEN - CNTR_NAME_FIL_465_FATAL_ERROR + CNTR_NAME_SEL_454_FATAL_ERROR - FIL_465 + SEL_454 - + @@ -1596,26 +1302,20 @@ - + - key + _col0 - - src2 - - + - value + _col1 - - src2 - @@ -1724,21 +1424,21 @@ - CNTR_NAME_FIL_481_NUM_INPUT_ROWS + CNTR_NAME_FIL_469_NUM_INPUT_ROWS - CNTR_NAME_FIL_481_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_469_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_481_TIME_TAKEN + CNTR_NAME_FIL_469_TIME_TAKEN - CNTR_NAME_FIL_481_FATAL_ERROR + CNTR_NAME_FIL_469_FATAL_ERROR - FIL_481 + FIL_469 @@ -1752,10 +1452,30 @@ - + + + key + + + src2 + + + + + - + + + value + + + src2 + + + + + @@ -1810,16 +1530,16 @@ - CNTR_NAME_TS_464_NUM_INPUT_ROWS + CNTR_NAME_TS_452_NUM_INPUT_ROWS - CNTR_NAME_TS_464_NUM_OUTPUT_ROWS + CNTR_NAME_TS_452_NUM_OUTPUT_ROWS - CNTR_NAME_TS_464_TIME_TAKEN + CNTR_NAME_TS_452_TIME_TAKEN - CNTR_NAME_TS_464_FATAL_ERROR + CNTR_NAME_TS_452_FATAL_ERROR @@ -1834,7 +1554,7 @@ - TS_464 + TS_452 @@ -1851,252 +1571,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 2 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_475_NUM_INPUT_ROWS - - - CNTR_NAME_RS_475_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_475_TIME_TAKEN - - - CNTR_NAME_RS_475_FATAL_ERROR - 
- - - - RS_475 - - - - - - - - - - - - - - - - VALUE._col0 - - - c - - - - - - - - - - VALUE._col1 - - - c - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src3 - - _col0 - + VALUE._col0 + - key + _col0 - - src3 - @@ -2105,18 +1607,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -2126,31 +1677,76 @@ + + + + + 2 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_469_NUM_INPUT_ROWS + CNTR_NAME_RS_463_NUM_INPUT_ROWS - CNTR_NAME_SEL_469_NUM_OUTPUT_ROWS + CNTR_NAME_RS_463_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_469_TIME_TAKEN + CNTR_NAME_RS_463_TIME_TAKEN - CNTR_NAME_SEL_469_FATAL_ERROR + CNTR_NAME_RS_463_FATAL_ERROR - SEL_469 + RS_463 - + @@ -2161,8 +1757,11 @@ - _col0 + VALUE._col0 + + c + @@ -2171,8 +1770,11 @@ - _col1 + VALUE._col1 + + c + @@ -2186,121 +1788,85 @@ - - - - - - - - - - - - - - key - - - src3 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - - - - - - - - key - - - src3 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src3 - + + + _col0 + + + key + + + src3 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_468_NUM_INPUT_ROWS + CNTR_NAME_SEL_457_NUM_INPUT_ROWS - CNTR_NAME_FIL_468_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_457_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_468_TIME_TAKEN + CNTR_NAME_SEL_457_TIME_TAKEN - CNTR_NAME_FIL_468_FATAL_ERROR + CNTR_NAME_SEL_457_FATAL_ERROR - FIL_468 + SEL_457 - + @@ -2309,26 +1875,20 @@ - + - key + _col0 - - src3 - - + - value + _col1 - - src3 - @@ -2437,21 +1997,21 @@ - CNTR_NAME_FIL_482_NUM_INPUT_ROWS + CNTR_NAME_FIL_470_NUM_INPUT_ROWS - CNTR_NAME_FIL_482_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_470_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_482_TIME_TAKEN + CNTR_NAME_FIL_470_TIME_TAKEN - CNTR_NAME_FIL_482_FATAL_ERROR + CNTR_NAME_FIL_470_FATAL_ERROR - FIL_482 + FIL_470 @@ -2465,10 +2025,30 @@ - + + + key + + + src3 + + + + + - + + + value + + + src3 + + + + + @@ -2523,16 +2103,16 @@ - CNTR_NAME_TS_467_NUM_INPUT_ROWS + CNTR_NAME_TS_455_NUM_INPUT_ROWS - CNTR_NAME_TS_467_NUM_OUTPUT_ROWS + CNTR_NAME_TS_455_NUM_OUTPUT_ROWS - CNTR_NAME_TS_467_TIME_TAKEN + CNTR_NAME_TS_455_TIME_TAKEN - CNTR_NAME_TS_467_FATAL_ERROR + CNTR_NAME_TS_455_FATAL_ERROR @@ -2547,7 +2127,7 @@ - TS_467 + TS_455 @@ -2791,21 +2371,21 @@ - CNTR_NAME_FS_479_NUM_INPUT_ROWS + CNTR_NAME_FS_467_NUM_INPUT_ROWS - CNTR_NAME_FS_479_NUM_OUTPUT_ROWS + CNTR_NAME_FS_467_NUM_OUTPUT_ROWS - CNTR_NAME_FS_479_TIME_TAKEN + CNTR_NAME_FS_467_TIME_TAKEN - CNTR_NAME_FS_479_FATAL_ERROR + CNTR_NAME_FS_467_FATAL_ERROR - FS_479 + FS_467 @@ -3025,21 +2605,21 @@ - CNTR_NAME_SEL_478_NUM_INPUT_ROWS + CNTR_NAME_SEL_466_NUM_INPUT_ROWS - CNTR_NAME_SEL_478_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_466_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_478_TIME_TAKEN + CNTR_NAME_SEL_466_TIME_TAKEN - CNTR_NAME_SEL_478_FATAL_ERROR + CNTR_NAME_SEL_466_FATAL_ERROR - SEL_478 + SEL_466 @@ -3295,21 +2875,21 @@ - CNTR_NAME_SEL_477_NUM_INPUT_ROWS + 
CNTR_NAME_SEL_465_NUM_INPUT_ROWS - CNTR_NAME_SEL_477_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_465_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_477_TIME_TAKEN + CNTR_NAME_SEL_465_TIME_TAKEN - CNTR_NAME_SEL_477_FATAL_ERROR + CNTR_NAME_SEL_465_FATAL_ERROR - SEL_477 + SEL_465 @@ -3625,21 +3205,21 @@ - CNTR_NAME_JOIN_476_NUM_INPUT_ROWS + CNTR_NAME_JOIN_464_NUM_INPUT_ROWS - CNTR_NAME_JOIN_476_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_464_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_476_TIME_TAKEN + CNTR_NAME_JOIN_464_TIME_TAKEN - CNTR_NAME_JOIN_476_FATAL_ERROR + CNTR_NAME_JOIN_464_FATAL_ERROR - JOIN_476 + JOIN_464 Index: ql/src/test/results/compiler/plan/join8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join8.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/join8.q.xml (working copy) @@ -284,249 +284,34 @@ - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - string - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_508_NUM_INPUT_ROWS - - - CNTR_NAME_RS_508_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_508_TIME_TAKEN - - - CNTR_NAME_RS_508_FATAL_ERROR - - - - - RS_508 - - - - - - - - - - - - - - - - VALUE._col0 - - - a - - - - - - - - - - VALUE._col1 - - - a - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src1 - - + + + string + + - _col0 - + VALUE._col0 + - key + _col0 - - src1 - @@ -535,18 +320,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -556,31 +390,73 @@ + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_507_NUM_INPUT_ROWS + CNTR_NAME_RS_493_NUM_INPUT_ROWS - CNTR_NAME_SEL_507_NUM_OUTPUT_ROWS + CNTR_NAME_RS_493_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_507_TIME_TAKEN + CNTR_NAME_RS_493_TIME_TAKEN - CNTR_NAME_SEL_507_FATAL_ERROR + CNTR_NAME_RS_493_FATAL_ERROR - SEL_507 + RS_493 - + @@ -591,8 +467,11 @@ - _col0 + VALUE._col0 + + a + @@ -601,8 +480,11 @@ - _col1 + VALUE._col1 + + a + @@ -616,124 +498,80 @@ - - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - int - - - - - 10 - - - - - - - - - - - - boolean - - - - - - - - - - - - - key - - - src1 - - - - - - - - - - - - - 20 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src1 - + + + _col0 + + + key + + + 
src1 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_506_NUM_INPUT_ROWS + CNTR_NAME_SEL_492_NUM_INPUT_ROWS - CNTR_NAME_FIL_506_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_492_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_506_TIME_TAKEN + CNTR_NAME_SEL_492_TIME_TAKEN - CNTR_NAME_FIL_506_FATAL_ERROR + CNTR_NAME_SEL_492_FATAL_ERROR - FIL_506 + SEL_492 @@ -747,26 +585,20 @@ - + - key + _col0 - - src1 - - + - value + _col1 - - src1 - @@ -806,7 +638,11 @@ - + + + int + + 10 @@ -819,7 +655,11 @@ - + + + boolean + + @@ -875,21 +715,21 @@ - CNTR_NAME_FIL_515_NUM_INPUT_ROWS + CNTR_NAME_FIL_501_NUM_INPUT_ROWS - CNTR_NAME_FIL_515_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_501_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_515_TIME_TAKEN + CNTR_NAME_FIL_501_TIME_TAKEN - CNTR_NAME_FIL_515_FATAL_ERROR + CNTR_NAME_FIL_501_FATAL_ERROR - FIL_515 + FIL_501 @@ -903,10 +743,30 @@ - + + + key + + + src1 + + + + + - + + + value + + + src1 + + + + + @@ -965,16 +825,16 @@ - CNTR_NAME_TS_505_NUM_INPUT_ROWS + CNTR_NAME_TS_490_NUM_INPUT_ROWS - CNTR_NAME_TS_505_NUM_OUTPUT_ROWS + CNTR_NAME_TS_490_NUM_OUTPUT_ROWS - CNTR_NAME_TS_505_TIME_TAKEN + CNTR_NAME_TS_490_TIME_TAKEN - CNTR_NAME_TS_505_FATAL_ERROR + CNTR_NAME_TS_490_FATAL_ERROR @@ -989,7 +849,7 @@ - TS_505 + TS_490 @@ -1006,252 +866,34 @@ - + - + - - - - - - - - - VALUE._col1 - - - _col1 - - - - - - - - VALUE._col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - joinkey0 - - - serialization.sort.order - + - - - columns.types - string - - - - - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - 1 - - - - - - - - - - - - - - - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - - - CNTR_NAME_RS_509_NUM_INPUT_ROWS - - - CNTR_NAME_RS_509_NUM_OUTPUT_ROWS - - - CNTR_NAME_RS_509_TIME_TAKEN - - - CNTR_NAME_RS_509_FATAL_ERROR - - - - - RS_509 - - - - - - - - - - - - - - - - VALUE._col0 - - - b - - - - - - - - - - VALUE._col1 - - - b - - - - - - - - - - - - - - + - _col1 - + VALUE._col1 + - value + _col1 - - src2 - - _col0 - + VALUE._col0 + - key + _col0 - - src2 - @@ -1260,18 +902,67 @@ - - - + + + + + + - + + + _col0 + + + + + + + + + + + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + joinkey0 + + + serialization.sort.order + + + + + columns.types + string + + + + + + + 1 + + + -1 + + + - + reducesinkkey0 - + _col0 @@ -1281,31 +972,76 @@ + + + + + 1 + + + + + + + + + + + + + + + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + columns.types + string,string + + + escape.delim + \ + + + + + - CNTR_NAME_SEL_504_NUM_INPUT_ROWS + CNTR_NAME_RS_494_NUM_INPUT_ROWS - CNTR_NAME_SEL_504_NUM_OUTPUT_ROWS + CNTR_NAME_RS_494_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_504_TIME_TAKEN + CNTR_NAME_RS_494_TIME_TAKEN - CNTR_NAME_SEL_504_FATAL_ERROR + CNTR_NAME_RS_494_FATAL_ERROR - SEL_504 + RS_494 - + @@ 
-1316,8 +1052,11 @@ - _col0 + VALUE._col0 + + b + @@ -1326,8 +1065,11 @@ - _col1 + VALUE._col1 + + b + @@ -1341,121 +1083,85 @@ - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 15 - - - - - - - - - - - - - - - - - - - - - key - - - src2 - - - - - - - - - - - - - 25 - - - - - - - - - - - - - - + + + + _col1 + + + value - - + + src2 - + + + _col0 + + + key + + + src2 + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + - CNTR_NAME_FIL_503_NUM_INPUT_ROWS + CNTR_NAME_SEL_489_NUM_INPUT_ROWS - CNTR_NAME_FIL_503_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_489_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_503_TIME_TAKEN + CNTR_NAME_SEL_489_TIME_TAKEN - CNTR_NAME_FIL_503_FATAL_ERROR + CNTR_NAME_SEL_489_FATAL_ERROR - FIL_503 + SEL_489 - + @@ -1464,26 +1170,20 @@ - + - key + _col0 - - src2 - - + - value + _col1 - - src2 - @@ -1592,21 +1292,21 @@ - CNTR_NAME_FIL_516_NUM_INPUT_ROWS + CNTR_NAME_FIL_502_NUM_INPUT_ROWS - CNTR_NAME_FIL_516_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_502_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_516_TIME_TAKEN + CNTR_NAME_FIL_502_TIME_TAKEN - CNTR_NAME_FIL_516_FATAL_ERROR + CNTR_NAME_FIL_502_FATAL_ERROR - FIL_516 + FIL_502 @@ -1620,10 +1320,30 @@ - + + + key + + + src2 + + + + + - + + + value + + + src2 + + + + + @@ -1678,16 +1398,16 @@ - CNTR_NAME_TS_502_NUM_INPUT_ROWS + CNTR_NAME_TS_487_NUM_INPUT_ROWS - CNTR_NAME_TS_502_NUM_OUTPUT_ROWS + CNTR_NAME_TS_487_NUM_OUTPUT_ROWS - CNTR_NAME_TS_502_TIME_TAKEN + CNTR_NAME_TS_487_TIME_TAKEN - CNTR_NAME_TS_502_FATAL_ERROR + CNTR_NAME_TS_487_FATAL_ERROR @@ -1702,7 +1422,7 @@ - TS_502 + TS_487 @@ -1886,11 +1606,11 @@ - + - + @@ -1947,21 +1667,21 @@ - CNTR_NAME_FS_514_NUM_INPUT_ROWS + CNTR_NAME_FS_499_NUM_INPUT_ROWS - CNTR_NAME_FS_514_NUM_OUTPUT_ROWS + CNTR_NAME_FS_499_NUM_OUTPUT_ROWS - CNTR_NAME_FS_514_TIME_TAKEN + CNTR_NAME_FS_499_TIME_TAKEN - CNTR_NAME_FS_514_FATAL_ERROR + CNTR_NAME_FS_499_FATAL_ERROR - FS_514 + FS_499 @@ -2121,26 +1841,26 @@ - CNTR_NAME_SEL_513_NUM_INPUT_ROWS + CNTR_NAME_SEL_498_NUM_INPUT_ROWS - CNTR_NAME_SEL_513_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_498_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_513_TIME_TAKEN + CNTR_NAME_SEL_498_TIME_TAKEN - CNTR_NAME_SEL_513_FATAL_ERROR + CNTR_NAME_SEL_498_FATAL_ERROR - SEL_513 + SEL_498 - + @@ -2220,95 +1940,125 @@ - - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - - - - - - + + + + _col3 + + + _col3 - - + + b - + + + _col2 + + + _col2 + + + b + + + + + + + + _col1 + + + _col1 + + + a + + + + + + + + _col0 + + + _col0 + + + a + + + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + _col3 + + + + + - CNTR_NAME_FIL_512_NUM_INPUT_ROWS + CNTR_NAME_SEL_496_NUM_INPUT_ROWS - CNTR_NAME_FIL_512_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_496_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_512_TIME_TAKEN + CNTR_NAME_SEL_496_TIME_TAKEN - CNTR_NAME_FIL_512_FATAL_ERROR + CNTR_NAME_SEL_496_FATAL_ERROR - FIL_512 + SEL_496 - + @@ -2317,7 +2067,7 @@ - + _col0 @@ -2327,7 +2077,7 @@ - + _col1 @@ -2337,7 +2087,7 @@ - + _col2 @@ -2347,7 +2097,7 @@ - + _col3 @@ -2364,120 +2114,96 @@ - - - - _col3 - - - _col3 + + + + + + + + + + + + + + _col2 + + + b + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + a + + + + + + + + + + + + + + + + + - - b + + - + - - _col2 - - - _col2 - - - b - - - - - - - - _col1 - - - _col1 - - - a - - - - - - - - _col0 - - - _col0 - - - a - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - - - - CNTR_NAME_SEL_511_NUM_INPUT_ROWS + CNTR_NAME_FIL_500_NUM_INPUT_ROWS - 
CNTR_NAME_SEL_511_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_500_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_511_TIME_TAKEN + CNTR_NAME_FIL_500_TIME_TAKEN - CNTR_NAME_SEL_511_FATAL_ERROR + CNTR_NAME_FIL_500_FATAL_ERROR - SEL_511 + FIL_500 @@ -2491,16 +2217,56 @@ - + + + _col0 + + + a + + + + + - + + + _col1 + + + a + + + + + - + + + _col2 + + + b + + + + + - + + + _col3 + + + b + + + + + @@ -2675,21 +2441,21 @@ - CNTR_NAME_JOIN_510_NUM_INPUT_ROWS + CNTR_NAME_JOIN_495_NUM_INPUT_ROWS - CNTR_NAME_JOIN_510_NUM_OUTPUT_ROWS + CNTR_NAME_JOIN_495_NUM_OUTPUT_ROWS - CNTR_NAME_JOIN_510_TIME_TAKEN + CNTR_NAME_JOIN_495_TIME_TAKEN - CNTR_NAME_JOIN_510_FATAL_ERROR + CNTR_NAME_JOIN_495_FATAL_ERROR - JOIN_510 + JOIN_495 @@ -2726,56 +2492,16 @@ - - - _col0 - - - a - - - - - + - - - _col1 - - - a - - - - - + - - - _col2 - - - b - - - - - + - - - _col3 - - - b - - - - - + Index: ql/src/test/results/compiler/plan/sample1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample1.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -75,11 +75,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 transient_lastDdlTime - 1304060232 + 1310382369 @@ -141,11 +141,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart transient_lastDdlTime - 1304060232 + 1310382369 @@ -169,282 +169,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-04-28_23-57-22_309_7605083373302035666/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-04-28_23-57-22_309_7605083373302035666/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3 - - - serialization.format - 1 - - - columns.types - string:string:string:string - - - - - - - 1 - - + + + + + file:/tmp/amarsri/hive_2011-07-11_04-06-12_350_8086266026815616219/-ext-10001 + + + 1 + + + file:/tmp/amarsri/hive_2011-07-11_04-06-12_350_8086266026815616219/-ext-10001/ + + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - CNTR_NAME_FS_536_NUM_INPUT_ROWS - - - CNTR_NAME_FS_536_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_536_TIME_TAKEN - - - CNTR_NAME_FS_536_FATAL_ERROR - - + + org.apache.hadoop.mapred.TextInputFormat - - FS_536 + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - + + + + columns + _col0,_col1,_col2,_col3 - - - - - - - - - - _col0 - - - - - - - - string - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - + + serialization.format + 1 + + columns.types + string:string:string:string + - - - - - - _col3 - - - hr - - - true - - - s - - - - - + + 1 - - _col2 - - - ds - - - true - - - s - - - - - - - - _col1 - - - value - - - s - - - - - - - - _col0 - - - key - - - s - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - - - true - - - - CNTR_NAME_SEL_535_NUM_INPUT_ROWS + CNTR_NAME_FS_520_NUM_INPUT_ROWS - CNTR_NAME_SEL_535_NUM_OUTPUT_ROWS + CNTR_NAME_FS_520_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_535_TIME_TAKEN + CNTR_NAME_FS_520_TIME_TAKEN - CNTR_NAME_SEL_535_FATAL_ERROR + CNTR_NAME_FS_520_FATAL_ERROR - SEL_535 + 
FS_520 - + @@ -454,30 +251,28 @@ - - key - _col0 - s + - + + + string + + - - value - _col1 - s + @@ -486,14 +281,11 @@ - - ds - _col2 - s + @@ -502,14 +294,11 @@ - - hr - _col3 - s + @@ -524,126 +313,129 @@ - - - - - - - - - - - - - - ds - - - true - - - s - - - - - - - - - - - - - 2008-04-08 - - - - - - - - - - - - boolean - - - - - - - - - - - - - hr - - - true - - - s - - - - - - - - - - - - - 11 - - - - - - - - - - - - - - + + + + _col3 + + + hr - - + + true + + s + - + + + _col2 + + + ds + + + true + + + s + + + + + + + + _col1 + + + value + + + s + + + + + + + + _col0 + + + key + + + s + + + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + _col3 + + + + + true + + + - CNTR_NAME_FIL_534_NUM_INPUT_ROWS + CNTR_NAME_SEL_519_NUM_INPUT_ROWS - CNTR_NAME_FIL_534_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_519_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_534_TIME_TAKEN + CNTR_NAME_SEL_519_TIME_TAKEN - CNTR_NAME_FIL_534_FATAL_ERROR + CNTR_NAME_SEL_519_FATAL_ERROR - FIL_534 + SEL_519 @@ -657,10 +449,13 @@ - - + + key + + _col0 + s @@ -670,10 +465,13 @@ - - + + value + + _col1 + s @@ -683,10 +481,13 @@ - - + + ds + + _col2 + s @@ -696,10 +497,13 @@ - - + + hr + + _col3 + s @@ -767,7 +571,7 @@ - + int @@ -778,7 +582,7 @@ - + 2147483647 @@ -801,14 +605,14 @@ - + - + 1 @@ -831,14 +635,14 @@ - + - + 0 @@ -851,7 +655,11 @@ - + + + boolean + + @@ -860,21 +668,21 @@ - CNTR_NAME_FIL_533_NUM_INPUT_ROWS + CNTR_NAME_FIL_517_NUM_INPUT_ROWS - CNTR_NAME_FIL_533_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_517_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_533_TIME_TAKEN + CNTR_NAME_FIL_517_TIME_TAKEN - CNTR_NAME_FIL_533_FATAL_ERROR + CNTR_NAME_FIL_517_FATAL_ERROR - FIL_533 + FIL_517 @@ -888,16 +696,56 @@ - + + + key + + + s + + + + + - + + + value + + + s + + + + + - + + + ds + + + s + + + + + - + + + hr + + + s + + + + + @@ -920,16 +768,16 @@ - CNTR_NAME_TS_532_NUM_INPUT_ROWS + CNTR_NAME_TS_516_NUM_INPUT_ROWS - CNTR_NAME_TS_532_NUM_OUTPUT_ROWS + CNTR_NAME_TS_516_NUM_OUTPUT_ROWS - CNTR_NAME_TS_532_TIME_TAKEN + CNTR_NAME_TS_516_TIME_TAKEN - CNTR_NAME_TS_532_FATAL_ERROR + CNTR_NAME_TS_516_FATAL_ERROR @@ -944,7 +792,7 @@ - TS_532 + TS_516 @@ -1012,7 +860,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 s @@ -1024,7 +872,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 hr=11 @@ -1094,11 +942,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart/ds=2008-04-08/hr=11 transient_lastDdlTime - 1304060233 + 1310382369 @@ -1160,11 +1008,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcpart + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcpart transient_lastDdlTime - 1304060232 + 1310382369 Index: ql/src/test/results/compiler/plan/sample2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample2.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/sample2.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10000/ + 
pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060251 + 1310382375 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_549_NUM_INPUT_ROWS + CNTR_NAME_FS_531_NUM_INPUT_ROWS - CNTR_NAME_FS_549_NUM_OUTPUT_ROWS + CNTR_NAME_FS_531_NUM_OUTPUT_ROWS - CNTR_NAME_FS_549_TIME_TAKEN + CNTR_NAME_FS_531_TIME_TAKEN - CNTR_NAME_FS_549_FATAL_ERROR + CNTR_NAME_FS_531_FATAL_ERROR - FS_549 + FS_531 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_548_NUM_INPUT_ROWS + CNTR_NAME_TS_530_NUM_INPUT_ROWS - CNTR_NAME_TS_548_NUM_OUTPUT_ROWS + CNTR_NAME_TS_530_NUM_OUTPUT_ROWS - CNTR_NAME_TS_548_TIME_TAKEN + CNTR_NAME_TS_530_TIME_TAKEN - CNTR_NAME_TS_548_FATAL_ERROR + CNTR_NAME_TS_530_FATAL_ERROR - TS_548 + TS_530 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060251 + 1310382375 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10000 + 
pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060246 + 1310382373 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060246 + 1310382373 @@ -622,351 +622,150 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-32_320_4074994742709103859/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_546_NUM_INPUT_ROWS - - - CNTR_NAME_FS_546_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_546_TIME_TAKEN - - - CNTR_NAME_FS_546_FATAL_ERROR - - - - - FS_546 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10002 - - _col0 - - - key - - - s - - - - - int - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-15_772_2901808161378380637/-ext-10000/ - - true + + + + 1 + - CNTR_NAME_SEL_545_NUM_INPUT_ROWS + CNTR_NAME_FS_529_NUM_INPUT_ROWS - CNTR_NAME_SEL_545_NUM_OUTPUT_ROWS + CNTR_NAME_FS_529_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_545_TIME_TAKEN + CNTR_NAME_FS_529_TIME_TAKEN - CNTR_NAME_SEL_545_FATAL_ERROR + CNTR_NAME_FS_529_FATAL_ERROR - SEL_545 + FS_529 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - - - + - - - - true + + + + _col1 + + + value + + + s + + + + + - - - - - - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - - - - - - - - - - - - - - 2147483647 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - & - - - - - - - - - - - - - - - 2 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - % - - - - - - - - - - - - - - - 0 - - - - + + _col0 + + + key - - + + s - + - boolean + int - - + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + - CNTR_NAME_FIL_544_NUM_INPUT_ROWS + CNTR_NAME_SEL_528_NUM_INPUT_ROWS - CNTR_NAME_FIL_544_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_528_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_544_TIME_TAKEN + CNTR_NAME_SEL_528_TIME_TAKEN - CNTR_NAME_FIL_544_FATAL_ERROR + CNTR_NAME_SEL_528_FATAL_ERROR - FIL_544 + SEL_528 @@ -980,9 +779,9 @@ - + - key + _col0 s @@ -993,9 +792,9 @@ - + - value + _col1 s @@ -1015,6 +814,9 @@ + + true + @@ -1130,30 +932,37 @@ - + + + boolean + + + + + - CNTR_NAME_FIL_547_NUM_INPUT_ROWS + CNTR_NAME_FIL_527_NUM_INPUT_ROWS - CNTR_NAME_FIL_547_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_527_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_547_TIME_TAKEN + CNTR_NAME_FIL_527_TIME_TAKEN - CNTR_NAME_FIL_547_FATAL_ERROR + CNTR_NAME_FIL_527_FATAL_ERROR - FIL_547 + FIL_527 @@ -1165,40 +974,24 @@ - + - - - - - - - - - true - + - BLOCK__OFFSET__INSIDE__FILE 
+ key s - - - bigint - - + - - - true - + - INPUT__FILE__NAME + value s @@ -1229,16 +1022,16 @@ - CNTR_NAME_TS_543_NUM_INPUT_ROWS + CNTR_NAME_TS_526_NUM_INPUT_ROWS - CNTR_NAME_TS_543_NUM_OUTPUT_ROWS + CNTR_NAME_TS_526_NUM_OUTPUT_ROWS - CNTR_NAME_TS_543_TIME_TAKEN + CNTR_NAME_TS_526_TIME_TAKEN - CNTR_NAME_TS_543_FATAL_ERROR + CNTR_NAME_TS_526_FATAL_ERROR @@ -1253,12 +1046,55 @@ - TS_543 + TS_526 - + + + + + + + + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + s + + + + + bigint + + + + + + + + + true + + + INPUT__FILE__NAME + + + s + + + + + + + @@ -1275,7 +1111,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket s @@ -1287,10 +1123,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket - srcbucket0.txt + srcbucket org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1348,11 +1184,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060246 + 1310382373 @@ -1414,11 +1250,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060246 + 1310382373 Index: ql/src/test/results/compiler/plan/sample3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample3.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/sample3.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060262 + 1310382379 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_563_NUM_INPUT_ROWS + CNTR_NAME_FS_543_NUM_INPUT_ROWS - CNTR_NAME_FS_563_NUM_OUTPUT_ROWS + CNTR_NAME_FS_543_NUM_OUTPUT_ROWS - CNTR_NAME_FS_563_TIME_TAKEN + CNTR_NAME_FS_543_TIME_TAKEN - CNTR_NAME_FS_563_FATAL_ERROR + CNTR_NAME_FS_543_FATAL_ERROR - FS_563 + FS_543 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_562_NUM_INPUT_ROWS + CNTR_NAME_TS_542_NUM_INPUT_ROWS - CNTR_NAME_TS_562_NUM_OUTPUT_ROWS + CNTR_NAME_TS_542_NUM_OUTPUT_ROWS - CNTR_NAME_TS_562_TIME_TAKEN + CNTR_NAME_TS_542_TIME_TAKEN - CNTR_NAME_TS_562_FATAL_ERROR + CNTR_NAME_TS_542_FATAL_ERROR - TS_562 + TS_542 @@ -262,10 +262,10 @@ - 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060262 + 1310382379 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060256 + 1310382377 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060256 + 1310382377 @@ -622,336 +622,98 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-42_366_4406586519555038916/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_560_NUM_INPUT_ROWS - - - CNTR_NAME_FS_560_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_560_TIME_TAKEN - - - CNTR_NAME_FS_560_FATAL_ERROR - - - - - FS_560 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10002 - - _col0 - - - key - - - s - - - - - int - - - - + + 
true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-19_243_2379834322132096386/-ext-10000/ - - true + + + + 1 + - CNTR_NAME_SEL_559_NUM_INPUT_ROWS + CNTR_NAME_FS_541_NUM_INPUT_ROWS - CNTR_NAME_SEL_559_NUM_OUTPUT_ROWS + CNTR_NAME_FS_541_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_559_TIME_TAKEN + CNTR_NAME_FS_541_TIME_TAKEN - CNTR_NAME_SEL_559_FATAL_ERROR + CNTR_NAME_FS_541_FATAL_ERROR - SEL_559 + FS_541 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - - - + - - - - true + + + + _col1 + + + value + + + s + + + + + - - - - - - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - value - - - s - - - - - - - - - - - - - - - - - - - - - - - 2147483647 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - & - - - - - - - - - - - - - - - 2 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - % - - - - - - - - - - - - - - - 0 - - - - + + _col0 + + + key - - + + s - + - boolean + int @@ -959,24 +721,51 @@ + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + + - CNTR_NAME_FIL_558_NUM_INPUT_ROWS + CNTR_NAME_SEL_540_NUM_INPUT_ROWS - CNTR_NAME_FIL_558_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_540_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_558_TIME_TAKEN + CNTR_NAME_SEL_540_TIME_TAKEN - CNTR_NAME_FIL_558_FATAL_ERROR + CNTR_NAME_SEL_540_FATAL_ERROR - FIL_558 + SEL_540 @@ -990,9 +779,9 @@ - + - key + _col0 s @@ -1003,9 +792,9 @@ - + - value + _col1 s @@ -1025,6 +814,9 @@ + + true + @@ -1153,7 +945,11 @@ - + + + boolean + + @@ -1162,21 +958,21 @@ - CNTR_NAME_FIL_561_NUM_INPUT_ROWS + CNTR_NAME_FIL_539_NUM_INPUT_ROWS - CNTR_NAME_FIL_561_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_539_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_561_TIME_TAKEN + CNTR_NAME_FIL_539_TIME_TAKEN - CNTR_NAME_FIL_561_FATAL_ERROR + CNTR_NAME_FIL_539_FATAL_ERROR - FIL_561 + FIL_539 @@ -1188,40 +984,24 @@ - + - - - - - - - - - true - + - BLOCK__OFFSET__INSIDE__FILE + key s - - - bigint - - + - - - true - + - INPUT__FILE__NAME + value s @@ -1252,16 +1032,16 @@ - CNTR_NAME_TS_557_NUM_INPUT_ROWS + CNTR_NAME_TS_538_NUM_INPUT_ROWS - CNTR_NAME_TS_557_NUM_OUTPUT_ROWS + CNTR_NAME_TS_538_NUM_OUTPUT_ROWS - CNTR_NAME_TS_557_TIME_TAKEN + CNTR_NAME_TS_538_TIME_TAKEN - CNTR_NAME_TS_557_FATAL_ERROR + CNTR_NAME_TS_538_FATAL_ERROR @@ -1276,12 +1056,55 @@ - TS_557 + TS_538 - + + + + + + + + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + s + + + + + bigint + + + + + + + + + true + + + INPUT__FILE__NAME + + + s + + + + + + + @@ -1298,7 +1121,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket s @@ -1310,7 +1133,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket srcbucket @@ -1371,11 +1194,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060256 + 1310382377 @@ -1437,11 +1260,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060256 + 1310382377 Index: ql/src/test/results/compiler/plan/sample4.q.xml =================================================================== --- 
ql/src/test/results/compiler/plan/sample4.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/sample4.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060272 + 1310382382 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_577_NUM_INPUT_ROWS + CNTR_NAME_FS_555_NUM_INPUT_ROWS - CNTR_NAME_FS_577_NUM_OUTPUT_ROWS + CNTR_NAME_FS_555_NUM_OUTPUT_ROWS - CNTR_NAME_FS_577_TIME_TAKEN + CNTR_NAME_FS_555_TIME_TAKEN - CNTR_NAME_FS_577_FATAL_ERROR + CNTR_NAME_FS_555_FATAL_ERROR - FS_577 + FS_555 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_576_NUM_INPUT_ROWS + CNTR_NAME_TS_554_NUM_INPUT_ROWS - CNTR_NAME_TS_576_NUM_OUTPUT_ROWS + CNTR_NAME_TS_554_NUM_OUTPUT_ROWS - CNTR_NAME_TS_576_TIME_TAKEN + CNTR_NAME_TS_554_TIME_TAKEN - CNTR_NAME_TS_576_FATAL_ERROR + CNTR_NAME_TS_554_FATAL_ERROR - TS_576 + TS_554 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060272 + 1310382382 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10002 + 
pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060266 + 1310382380 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060266 + 1310382380 @@ -622,351 +622,150 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-57-52_427_2529501116964601029/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_574_NUM_INPUT_ROWS - - - CNTR_NAME_FS_574_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_574_TIME_TAKEN - - - CNTR_NAME_FS_574_FATAL_ERROR - - - - - FS_574 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10002 - - _col0 - - - key - - - s - - - - - int - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-22_731_7652885142239938307/-ext-10000/ - - true + + + + 1 + - CNTR_NAME_SEL_573_NUM_INPUT_ROWS + CNTR_NAME_FS_553_NUM_INPUT_ROWS - CNTR_NAME_SEL_573_NUM_OUTPUT_ROWS + CNTR_NAME_FS_553_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_573_TIME_TAKEN + CNTR_NAME_FS_553_TIME_TAKEN - CNTR_NAME_SEL_573_FATAL_ERROR + CNTR_NAME_FS_553_FATAL_ERROR - SEL_573 + FS_553 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - - - + - - - - true + + + + _col1 + + + value + + + s + + + + + - - - - - - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - - - - - - - - - - - - - - 2147483647 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - & - - - - - - - - - - - - - - - 2 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - % - - - - - - - - - - - - - - - 0 - - - - + + _col0 + + + key - - + + s - + - boolean + int - - + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + - CNTR_NAME_FIL_572_NUM_INPUT_ROWS + CNTR_NAME_SEL_552_NUM_INPUT_ROWS - CNTR_NAME_FIL_572_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_552_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_572_TIME_TAKEN + CNTR_NAME_SEL_552_TIME_TAKEN - CNTR_NAME_FIL_572_FATAL_ERROR + CNTR_NAME_SEL_552_FATAL_ERROR - FIL_572 + SEL_552 @@ -980,9 +779,9 @@ - + - key + _col0 s @@ -993,9 +792,9 @@ - + - value + _col1 s @@ -1015,6 +814,9 @@ + + true + @@ -1130,30 +932,37 @@ - + + + boolean + + + + + - CNTR_NAME_FIL_575_NUM_INPUT_ROWS + CNTR_NAME_FIL_551_NUM_INPUT_ROWS - CNTR_NAME_FIL_575_NUM_OUTPUT_ROWS + 
CNTR_NAME_FIL_551_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_575_TIME_TAKEN + CNTR_NAME_FIL_551_TIME_TAKEN - CNTR_NAME_FIL_575_FATAL_ERROR + CNTR_NAME_FIL_551_FATAL_ERROR - FIL_575 + FIL_551 @@ -1165,40 +974,24 @@ - + - - - - - - - - - true - + - BLOCK__OFFSET__INSIDE__FILE + key s - - - bigint - - + - - - true - + - INPUT__FILE__NAME + value s @@ -1229,16 +1022,16 @@ - CNTR_NAME_TS_571_NUM_INPUT_ROWS + CNTR_NAME_TS_550_NUM_INPUT_ROWS - CNTR_NAME_TS_571_NUM_OUTPUT_ROWS + CNTR_NAME_TS_550_NUM_OUTPUT_ROWS - CNTR_NAME_TS_571_TIME_TAKEN + CNTR_NAME_TS_550_TIME_TAKEN - CNTR_NAME_TS_571_FATAL_ERROR + CNTR_NAME_TS_550_FATAL_ERROR @@ -1253,12 +1046,55 @@ - TS_571 + TS_550 - + + + + + + + + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + s + + + + + bigint + + + + + + + + + true + + + INPUT__FILE__NAME + + + s + + + + + + + @@ -1275,7 +1111,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket s @@ -1287,10 +1123,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket - srcbucket0.txt + srcbucket org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1348,11 +1184,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060266 + 1310382380 @@ -1414,11 +1250,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060266 + 1310382380 Index: ql/src/test/results/compiler/plan/sample5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample5.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/sample5.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060282 + 1310382386 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_591_NUM_INPUT_ROWS + CNTR_NAME_FS_567_NUM_INPUT_ROWS - CNTR_NAME_FS_591_NUM_OUTPUT_ROWS + CNTR_NAME_FS_567_NUM_OUTPUT_ROWS - CNTR_NAME_FS_591_TIME_TAKEN + CNTR_NAME_FS_567_TIME_TAKEN - CNTR_NAME_FS_591_FATAL_ERROR + CNTR_NAME_FS_567_FATAL_ERROR - FS_591 + FS_567 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_590_NUM_INPUT_ROWS + CNTR_NAME_TS_566_NUM_INPUT_ROWS - 
CNTR_NAME_TS_590_NUM_OUTPUT_ROWS + CNTR_NAME_TS_566_NUM_OUTPUT_ROWS - CNTR_NAME_TS_590_TIME_TAKEN + CNTR_NAME_TS_566_TIME_TAKEN - CNTR_NAME_TS_590_FATAL_ERROR + CNTR_NAME_TS_566_FATAL_ERROR - TS_590 + TS_566 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060282 + 1310382386 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060276 + 1310382384 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060276 + 1310382384 @@ -622,323 +622,98 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-02_739_6497337761493060098/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_588_NUM_INPUT_ROWS - - - CNTR_NAME_FS_588_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_588_TIME_TAKEN - - - CNTR_NAME_FS_588_FATAL_ERROR - - - - - FS_588 - - - - - - - - - - - - + 
+ + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10002 - - _col0 - - - key - - - s - - - - - int - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-26_178_8445131308101140110/-ext-10000/ - - true + + + + 1 + - CNTR_NAME_SEL_587_NUM_INPUT_ROWS + CNTR_NAME_FS_565_NUM_INPUT_ROWS - CNTR_NAME_SEL_587_NUM_OUTPUT_ROWS + CNTR_NAME_FS_565_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_587_TIME_TAKEN + CNTR_NAME_FS_565_TIME_TAKEN - CNTR_NAME_SEL_587_FATAL_ERROR + CNTR_NAME_FS_565_FATAL_ERROR - SEL_587 + FS_565 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - - - + - - - - true + + + + _col1 + + + value + + + s + + + + + - - - - - - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - - - - - - - - - - - - - - 2147483647 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - & - - - - - - - - - - - - - - - 5 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - % - - - - - - - - - - - - - - - 0 - - - - + + _col0 + + + key - - + + s - + - boolean + int @@ -946,24 +721,51 @@ + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + + - CNTR_NAME_FIL_586_NUM_INPUT_ROWS + CNTR_NAME_SEL_564_NUM_INPUT_ROWS - CNTR_NAME_FIL_586_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_564_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_586_TIME_TAKEN + CNTR_NAME_SEL_564_TIME_TAKEN - CNTR_NAME_FIL_586_FATAL_ERROR + CNTR_NAME_SEL_564_FATAL_ERROR - FIL_586 + SEL_564 @@ -977,9 +779,9 @@ - + - key + _col0 s @@ -990,9 +792,9 @@ - + - value + _col1 s @@ -1012,6 +814,9 @@ + + true + @@ -1127,7 +932,11 @@ - + + + boolean + + @@ -1136,21 +945,21 @@ - CNTR_NAME_FIL_589_NUM_INPUT_ROWS + CNTR_NAME_FIL_563_NUM_INPUT_ROWS - CNTR_NAME_FIL_589_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_563_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_589_TIME_TAKEN + CNTR_NAME_FIL_563_TIME_TAKEN - CNTR_NAME_FIL_589_FATAL_ERROR + CNTR_NAME_FIL_563_FATAL_ERROR - FIL_589 + FIL_563 @@ -1162,40 +971,24 @@ - + - - - - - - - - - true - + - BLOCK__OFFSET__INSIDE__FILE + key s - - - bigint - - + - - - true - + - INPUT__FILE__NAME + value s @@ -1226,16 +1019,16 @@ - CNTR_NAME_TS_585_NUM_INPUT_ROWS + CNTR_NAME_TS_562_NUM_INPUT_ROWS - CNTR_NAME_TS_585_NUM_OUTPUT_ROWS + CNTR_NAME_TS_562_NUM_OUTPUT_ROWS - CNTR_NAME_TS_585_TIME_TAKEN + CNTR_NAME_TS_562_TIME_TAKEN - CNTR_NAME_TS_585_FATAL_ERROR + CNTR_NAME_TS_562_FATAL_ERROR @@ -1250,12 +1043,55 @@ - TS_585 + TS_562 - + + + + + + + + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + s + + + + + bigint + + + + + + + + + true + + + INPUT__FILE__NAME + + + s + + + + + + + @@ -1272,7 +1108,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket s @@ -1284,7 +1120,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket srcbucket @@ -1345,11 +1181,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060276 + 1310382384 @@ -1411,11 +1247,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + 
pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060276 + 1310382384 Index: ql/src/test/results/compiler/plan/sample6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample6.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/sample6.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060292 + 1310382389 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_605_NUM_INPUT_ROWS + CNTR_NAME_FS_579_NUM_INPUT_ROWS - CNTR_NAME_FS_605_NUM_OUTPUT_ROWS + CNTR_NAME_FS_579_NUM_OUTPUT_ROWS - CNTR_NAME_FS_605_TIME_TAKEN + CNTR_NAME_FS_579_TIME_TAKEN - CNTR_NAME_FS_605_FATAL_ERROR + CNTR_NAME_FS_579_FATAL_ERROR - FS_605 + FS_579 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_604_NUM_INPUT_ROWS + CNTR_NAME_TS_578_NUM_INPUT_ROWS - CNTR_NAME_TS_604_NUM_OUTPUT_ROWS + CNTR_NAME_TS_578_NUM_OUTPUT_ROWS - CNTR_NAME_TS_604_TIME_TAKEN + CNTR_NAME_TS_578_TIME_TAKEN - CNTR_NAME_TS_604_FATAL_ERROR + CNTR_NAME_TS_578_FATAL_ERROR - TS_604 + TS_578 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060292 + 1310382389 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10001 + 
pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060287 + 1310382387 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060287 + 1310382387 @@ -622,351 +622,150 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10002 - - - true - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-12_841_9078625175194764446/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_602_NUM_INPUT_ROWS - - - CNTR_NAME_FS_602_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_602_TIME_TAKEN - - - CNTR_NAME_FS_602_FATAL_ERROR - - - - - FS_602 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10002 - - _col0 - - - key - - - s - - - - - int - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-29_716_3544047158874325131/-ext-10000/ - - true + + + + 1 + - CNTR_NAME_SEL_601_NUM_INPUT_ROWS + CNTR_NAME_FS_577_NUM_INPUT_ROWS - CNTR_NAME_SEL_601_NUM_OUTPUT_ROWS + CNTR_NAME_FS_577_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_601_TIME_TAKEN + CNTR_NAME_FS_577_TIME_TAKEN - CNTR_NAME_SEL_601_FATAL_ERROR + CNTR_NAME_FS_577_FATAL_ERROR - SEL_601 + FS_577 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - - - + - - - - true + + + + _col1 + + + value + + + s + + + + + - - - - - - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - - - - - - - - - - - - - - 2147483647 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - & - - - - - - - - - - - - - - - 4 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - % - - - - - - - - - - - - - - - 0 - - - - + + _col0 + + + key - - + + s - + - boolean + int - - + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + - CNTR_NAME_FIL_600_NUM_INPUT_ROWS + CNTR_NAME_SEL_576_NUM_INPUT_ROWS - CNTR_NAME_FIL_600_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_576_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_600_TIME_TAKEN + CNTR_NAME_SEL_576_TIME_TAKEN - CNTR_NAME_FIL_600_FATAL_ERROR + CNTR_NAME_SEL_576_FATAL_ERROR - FIL_600 + SEL_576 @@ -980,9 +779,9 
@@ - + - key + _col0 s @@ -993,9 +792,9 @@ - + - value + _col1 s @@ -1015,6 +814,9 @@ + + true + @@ -1130,30 +932,37 @@ - + + + boolean + + + + + - CNTR_NAME_FIL_603_NUM_INPUT_ROWS + CNTR_NAME_FIL_575_NUM_INPUT_ROWS - CNTR_NAME_FIL_603_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_575_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_603_TIME_TAKEN + CNTR_NAME_FIL_575_TIME_TAKEN - CNTR_NAME_FIL_603_FATAL_ERROR + CNTR_NAME_FIL_575_FATAL_ERROR - FIL_603 + FIL_575 @@ -1165,40 +974,24 @@ - + - - - - - - - - - true - + - BLOCK__OFFSET__INSIDE__FILE + key s - - - bigint - - + - - - true - + - INPUT__FILE__NAME + value s @@ -1229,16 +1022,16 @@ - CNTR_NAME_TS_599_NUM_INPUT_ROWS + CNTR_NAME_TS_574_NUM_INPUT_ROWS - CNTR_NAME_TS_599_NUM_OUTPUT_ROWS + CNTR_NAME_TS_574_NUM_OUTPUT_ROWS - CNTR_NAME_TS_599_TIME_TAKEN + CNTR_NAME_TS_574_TIME_TAKEN - CNTR_NAME_TS_599_FATAL_ERROR + CNTR_NAME_TS_574_FATAL_ERROR @@ -1253,12 +1046,55 @@ - TS_599 + TS_574 - + + + + + + + + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + s + + + + + bigint + + + + + + + + + true + + + INPUT__FILE__NAME + + + s + + + + + + + @@ -1275,7 +1111,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket s @@ -1287,10 +1123,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket - srcbucket0.txt + srcbucket org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1348,11 +1184,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060287 + 1310382387 @@ -1414,11 +1250,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060287 + 1310382387 Index: ql/src/test/results/compiler/plan/sample7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample7.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/sample7.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -33,7 +33,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10000/ + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10000/ @@ -73,7 +73,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10002 @@ -82,7 +82,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10000 1 @@ -134,7 +134,7 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 file.outputformat @@ -142,7 +142,7 @@ transient_lastDdlTime - 1304060302 + 1310382393 @@ -159,21 +159,21 @@ - CNTR_NAME_FS_620_NUM_INPUT_ROWS + CNTR_NAME_FS_593_NUM_INPUT_ROWS - CNTR_NAME_FS_620_NUM_OUTPUT_ROWS + CNTR_NAME_FS_593_NUM_OUTPUT_ROWS - 
CNTR_NAME_FS_620_TIME_TAKEN + CNTR_NAME_FS_593_TIME_TAKEN - CNTR_NAME_FS_620_FATAL_ERROR + CNTR_NAME_FS_593_FATAL_ERROR - FS_620 + FS_593 @@ -227,21 +227,21 @@ - CNTR_NAME_TS_619_NUM_INPUT_ROWS + CNTR_NAME_TS_592_NUM_INPUT_ROWS - CNTR_NAME_TS_619_NUM_OUTPUT_ROWS + CNTR_NAME_TS_592_NUM_OUTPUT_ROWS - CNTR_NAME_TS_619_TIME_TAKEN + CNTR_NAME_TS_592_TIME_TAKEN - CNTR_NAME_TS_619_FATAL_ERROR + CNTR_NAME_TS_592_FATAL_ERROR - TS_619 + TS_592 @@ -262,10 +262,10 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10002 @@ -274,7 +274,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10002 -ext-10002 @@ -328,11 +328,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/dest1 + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/dest1 transient_lastDdlTime - 1304060302 + 1310382393 @@ -382,13 +382,13 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10000 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10001 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10001 @@ -409,10 +409,10 @@ true - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10002 - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10000 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10000 @@ -438,7 +438,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10002 + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10002 @@ -528,11 +528,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060297 + 1310382391 @@ -594,11 +594,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060297 + 1310382391 @@ -626,238 +626,98 @@ - + - - - - - - - - - 1 - - - pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10002 - - - true - - - 1 - - - 
pfile:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-22_925_1445560214059395005/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_617_NUM_INPUT_ROWS - - - CNTR_NAME_FS_617_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_617_TIME_TAKEN - - - CNTR_NAME_FS_617_FATAL_ERROR - - - - - FS_617 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - value - - - s - - - - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10002 - - _col0 - - - key - - - s - - - - - int - - - - + + true - - - - - - - - - - - - - + + 1 - - - - _col0 - - - _col1 - - + + pfile:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-33_170_7666241259022167376/-ext-10000/ - - true + + + + 1 + - CNTR_NAME_SEL_616_NUM_INPUT_ROWS + CNTR_NAME_FS_590_NUM_INPUT_ROWS - CNTR_NAME_SEL_616_NUM_OUTPUT_ROWS + CNTR_NAME_FS_590_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_616_TIME_TAKEN + CNTR_NAME_FS_590_TIME_TAKEN - CNTR_NAME_SEL_616_FATAL_ERROR + CNTR_NAME_FS_590_FATAL_ERROR - SEL_616 + FS_590 - + - - - - - - - _col0 - - - s - - - - - - - - - - _col1 - - - s - - - - - - - - - + - - - - - - - - - - key - - - s - - - - - - - - - - - - - 100 - - - - + + + + _col1 + + + value - - + + s - + + + + + + _col0 + + + key + + + s + + + - boolean + int @@ -865,24 +725,51 @@ + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + + - CNTR_NAME_FIL_615_NUM_INPUT_ROWS + CNTR_NAME_SEL_589_NUM_INPUT_ROWS - CNTR_NAME_FIL_615_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_589_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_615_TIME_TAKEN + CNTR_NAME_SEL_589_TIME_TAKEN - CNTR_NAME_FIL_615_FATAL_ERROR + CNTR_NAME_SEL_589_FATAL_ERROR - FIL_615 + SEL_589 @@ -896,9 +783,9 @@ - + - key + _col0 s @@ -909,9 +796,9 @@ - + - value + _col1 s @@ -1049,7 +936,11 @@ - + + + boolean + + @@ -1061,21 +952,21 @@ - CNTR_NAME_FIL_614_NUM_INPUT_ROWS + CNTR_NAME_FIL_587_NUM_INPUT_ROWS - CNTR_NAME_FIL_614_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_587_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_614_TIME_TAKEN + CNTR_NAME_FIL_587_TIME_TAKEN - CNTR_NAME_FIL_614_FATAL_ERROR + CNTR_NAME_FIL_587_FATAL_ERROR - FIL_614 + FIL_587 @@ -1089,10 +980,30 @@ - + + + key + + + s + + + + + - + + + value + + + s + + + + + @@ -1109,165 +1020,32 @@ - - - - - - - - - - - - - - - - - - - key - - - s - - - - - - - - - - - - - - - - - - - - - - - 2147483647 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPBitAnd - - - & - - - - - - - - - - - - - - - 4 - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFOPMod - - - % - - - - - - - - - - - - - - - 0 - - - - + + + key - - + + s - + - - - - - - - key - - - s - - - - - - - - - - - - - 100 - - - - - - - - + - + + + 100 + - + @@ -1279,21 +1057,21 @@ - CNTR_NAME_FIL_618_NUM_INPUT_ROWS + CNTR_NAME_FIL_591_NUM_INPUT_ROWS - CNTR_NAME_FIL_618_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_591_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_618_TIME_TAKEN + CNTR_NAME_FIL_591_TIME_TAKEN - CNTR_NAME_FIL_618_FATAL_ERROR + CNTR_NAME_FIL_591_FATAL_ERROR - FIL_618 + FIL_591 @@ -1369,16 +1147,16 @@ - CNTR_NAME_TS_613_NUM_INPUT_ROWS + CNTR_NAME_TS_586_NUM_INPUT_ROWS - CNTR_NAME_TS_613_NUM_OUTPUT_ROWS + CNTR_NAME_TS_586_NUM_OUTPUT_ROWS - CNTR_NAME_TS_613_TIME_TAKEN + CNTR_NAME_TS_586_TIME_TAKEN - CNTR_NAME_TS_613_FATAL_ERROR + CNTR_NAME_TS_586_FATAL_ERROR @@ -1393,7 +1171,7 @@ - TS_613 + TS_586 @@ -1415,7 +1193,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt s @@ -1427,7 
+1205,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket/srcbucket0.txt srcbucket0.txt @@ -1488,11 +1266,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060297 + 1310382391 @@ -1554,11 +1332,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/srcbucket + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/srcbucket transient_lastDdlTime - 1304060297 + 1310382391 Index: ql/src/test/results/compiler/plan/subq.q.xml =================================================================== --- ql/src/test/results/compiler/plan/subq.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/subq.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -41,7 +41,7 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10001 @@ -50,7 +50,7 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10000 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10000 1 @@ -92,21 +92,21 @@ - CNTR_NAME_FS_636_NUM_INPUT_ROWS + CNTR_NAME_FS_608_NUM_INPUT_ROWS - CNTR_NAME_FS_636_NUM_OUTPUT_ROWS + CNTR_NAME_FS_608_NUM_OUTPUT_ROWS - CNTR_NAME_FS_636_TIME_TAKEN + CNTR_NAME_FS_608_TIME_TAKEN - CNTR_NAME_FS_636_FATAL_ERROR + CNTR_NAME_FS_608_FATAL_ERROR - FS_636 + FS_608 @@ -160,21 +160,21 @@ - CNTR_NAME_TS_635_NUM_INPUT_ROWS + CNTR_NAME_TS_607_NUM_INPUT_ROWS - CNTR_NAME_TS_635_NUM_OUTPUT_ROWS + CNTR_NAME_TS_607_NUM_OUTPUT_ROWS - CNTR_NAME_TS_635_TIME_TAKEN + CNTR_NAME_TS_607_TIME_TAKEN - CNTR_NAME_TS_635_FATAL_ERROR + CNTR_NAME_TS_607_FATAL_ERROR - TS_635 + TS_607 @@ -195,10 +195,10 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10001 - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10001 @@ -207,7 +207,7 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10001 -ext-10001 @@ -280,7 +280,7 @@ true - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10000 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10000 ../build/ql/test/data/warehouse/union.out @@ -304,10 +304,10 @@ true - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10001 - 
file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10000 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10000 @@ -333,7 +333,7 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10001 @@ -419,11 +419,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060310 + 1310382395 @@ -481,11 +481,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060310 + 1310382395 @@ -509,198 +509,65 @@ - + - + - - - - - - - - - 1 - - - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10001 - - - 1 - - - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-58-33_025_4812811828316012703/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_633_NUM_INPUT_ROWS - - - CNTR_NAME_FS_633_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_633_TIME_TAKEN - - - CNTR_NAME_FS_633_FATAL_ERROR - - - - - FS_633 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - _col1 - - - src - - - - - + + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10001 - - _col0 - - - _col0 - - - src - - - - - + + 1 - - - - - - - - - - - - - + + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-36_616_1136566510107816555/-ext-10000/ - - - - _col0 - - - _col1 - - + + - - true + + 1 - CNTR_NAME_SEL_632_NUM_INPUT_ROWS + CNTR_NAME_FS_605_NUM_INPUT_ROWS - CNTR_NAME_SEL_632_NUM_OUTPUT_ROWS + CNTR_NAME_FS_605_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_632_TIME_TAKEN + CNTR_NAME_FS_605_TIME_TAKEN - CNTR_NAME_SEL_632_FATAL_ERROR + CNTR_NAME_FS_605_FATAL_ERROR - SEL_632 + FS_605 - + - - - - - - - key - - - _col0 - - - src - - - - - - - - - - value - - - _col1 - - - src - - - - - - - - - + @@ -710,9 +577,9 @@ _col1 - + - value + _col1 src @@ -724,9 +591,9 @@ _col0 - + - key + _col0 src @@ -743,10 +610,10 @@ - + - + @@ -768,26 +635,26 @@ - CNTR_NAME_SEL_631_NUM_INPUT_ROWS + CNTR_NAME_SEL_604_NUM_INPUT_ROWS - CNTR_NAME_SEL_631_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_604_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_631_TIME_TAKEN + CNTR_NAME_SEL_604_TIME_TAKEN - CNTR_NAME_SEL_631_FATAL_ERROR + CNTR_NAME_SEL_604_FATAL_ERROR - SEL_631 + SEL_604 - + @@ -797,6 +664,9 @@ + + key + _col0 @@ -810,6 +680,9 @@ + + value + _col1 @@ -829,73 +702,83 @@ - - - - - - - - - - key - - - src - - - - - - - - - - - - int - - - - - 100 - - - - + + + + _col1 + + + value - - + + src - - - boolean - - + + + _col0 + + + key + + + src + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + + - CNTR_NAME_FIL_630_NUM_INPUT_ROWS + CNTR_NAME_SEL_603_NUM_INPUT_ROWS - CNTR_NAME_FIL_630_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_603_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_630_TIME_TAKEN + CNTR_NAME_SEL_603_TIME_TAKEN - CNTR_NAME_FIL_630_FATAL_ERROR + CNTR_NAME_SEL_603_FATAL_ERROR - FIL_630 + SEL_603 @@ -909,9 +792,9 @@ - + - key + _col0 src @@ -922,9 +805,9 @@ - + - value + _col1 src @@ -964,7 +847,11 @@ - + + + int + + 100 @@ -977,7 +864,11 @@ - + + + 
boolean + + @@ -986,21 +877,21 @@ - CNTR_NAME_FIL_634_NUM_INPUT_ROWS + CNTR_NAME_FIL_606_NUM_INPUT_ROWS - CNTR_NAME_FIL_634_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_606_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_634_TIME_TAKEN + CNTR_NAME_FIL_606_TIME_TAKEN - CNTR_NAME_FIL_634_FATAL_ERROR + CNTR_NAME_FIL_606_FATAL_ERROR - FIL_634 + FIL_606 @@ -1014,10 +905,30 @@ - + + + key + + + src + + + + + - + + + value + + + src + + + + + @@ -1076,16 +987,16 @@ - CNTR_NAME_TS_629_NUM_INPUT_ROWS + CNTR_NAME_TS_601_NUM_INPUT_ROWS - CNTR_NAME_TS_629_NUM_OUTPUT_ROWS + CNTR_NAME_TS_601_NUM_OUTPUT_ROWS - CNTR_NAME_TS_629_TIME_TAKEN + CNTR_NAME_TS_601_TIME_TAKEN - CNTR_NAME_TS_629_FATAL_ERROR + CNTR_NAME_TS_601_FATAL_ERROR @@ -1100,7 +1011,7 @@ - TS_629 + TS_601 @@ -1119,7 +1030,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src unioninput:src @@ -1131,7 +1042,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -1188,11 +1099,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060310 + 1310382395 @@ -1250,11 +1161,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060310 + 1310382395 Index: ql/src/test/results/compiler/plan/udf1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf1.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy) @@ -1,5 +1,5 @@ - + Stage-3 @@ -62,11 +62,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060320 + 1310382399 @@ -124,11 +124,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060320 + 1310382399 @@ -152,1261 +152,79 @@ - + - - - - - - - - - file:/tmp/sdong/hive_2011-04-28_23-58-42_926_5215082691597914017/-ext-10001 - - - 1 - - - file:/tmp/sdong/hive_2011-04-28_23-58-42_926_5215082691597914017/-ext-10001/ - - - - - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - columns - _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16 - - - serialization.format - 1 - - - columns.types - boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:string:string:string:string - - - - - - - 1 - - - - - - - CNTR_NAME_FS_648_NUM_INPUT_ROWS - - - CNTR_NAME_FS_648_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_648_TIME_TAKEN - - - CNTR_NAME_FS_648_FATAL_ERROR - - - - - FS_648 - - - - - - - - - - - - - - - - _col0 - - - - - - - - boolean - - - - - - - - - _col1 - - - - - - - - - - - - - _col2 - - - - - - - - - - - - - _col3 - - - - - - - - - - - - - _col4 - - - - - - - - - - - - - _col5 - - - - - - - - - - - - - _col6 - - - - - - - - - - - - - _col7 - - - - - - - - - - - - - _col8 - - - - - - - - - - - - - _col9 - - - - - - - - - - - - 
- _col10 - - - - - - - - - - - - - _col11 - - - - - - - - - - - - - _col12 - - - - - - - - - - - - - _col13 - - - - - - - - string - - - - - - - - - _col14 - - - - - - - - - - - - - _col15 - - - - - - - - - - - - - _col16 - - - - - - - - - - - - - - + + + + + file:/tmp/amarsri/hive_2011-07-11_04-06-40_095_2249714112585544624/-ext-10001 - - - - - - _col8 - - - - - - - - - - - - - - - - - - - - .* - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - + + 1 - - _col7 - - - - - - - - - - ab - - - - - - - - - - a - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - + + file:/tmp/amarsri/hive_2011-07-11_04-06-40_095_2249714112585544624/-ext-10001/ - - _col6 - - - - - - - - - - ab - - - - - - - - - - _a% - - - - + + + + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - + + org.apache.hadoop.mapred.TextInputFormat - - + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - _col5 - - - - - - - - - - ab - - + + + + columns + _col0,_col1,_col2,_col3,_col4,_col5,_col6,_col7,_col8,_col9,_col10,_col11,_col12,_col13,_col14,_col15,_col16 - - - - - - - \%\_ - - + + serialization.format + 1 - - - - - - true + + columns.types + boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:boolean:string:string:string:string - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - _col4 - - - - - - - - - - %_ - - - - - - - - - - \%\_ - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - + + 1 - - _col3 - - - - - - - - - - ab - - - - - - - - - - %a_ - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - - - - _col2 - - - - - - - - - - ab - - - - - - - - - - %a% - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - - - - _col1 - - - - - - - - - - b - - - - - - - - - - %a% - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - - - - _col9 - - - - - - - - - - a - - - - - - - - - - [ab] - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - - - - _col13 - - - - - - - - - - abc - - - - - - - - - - b - - - - - - - - - - c - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFRegExpReplace - - - regexp_replace - - - - - - - - - - _col12 - - - - - - - - - - hadoop - - - - - - - - - - o* - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - - - - _col11 - - - - - - - - - - hadoop - - - - - - - - - - [a-z]* - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - - - - _col10 - - - - - - - - - - - - - - - - - - - - [ab] - - - - - - - - - true - - - org.apache.hadoop.hive.ql.udf.UDFRegExp - - - rlike - - - - - - - - - - _col16 - - - - - - - - - - hadoop - - - - - - - - - - (.)[a-z]* - - - - - - - - - - $1ive - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFRegExpReplace - - - regexp_replace - - - - - - - - - - _col15 - - - - - - - - - - abbbb - - - - - - - - - - bb - - - - - - - - - - b - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFRegExpReplace - - - regexp_replace - - - - - - - - - - _col14 - - - - - - - - - - abc - - - - - - - - - - z - - - - - - - - - - a - - - - - - - - - org.apache.hadoop.hive.ql.udf.UDFRegExpReplace - - - regexp_replace - - - - - - - - - - _col0 - - - - - - - - - - a - - - - - - - - - - %a% - - - - - - - - - true - - - 
org.apache.hadoop.hive.ql.udf.UDFLike - - - like - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - _col2 - - - _col3 - - - _col4 - - - _col5 - - - _col6 - - - _col7 - - - _col8 - - - _col9 - - - _col10 - - - _col11 - - - _col12 - - - _col13 - - - _col14 - - - _col15 - - - _col16 - - - - - - CNTR_NAME_SEL_647_NUM_INPUT_ROWS + CNTR_NAME_FS_619_NUM_INPUT_ROWS - CNTR_NAME_SEL_647_NUM_OUTPUT_ROWS + CNTR_NAME_FS_619_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_647_TIME_TAKEN + CNTR_NAME_FS_619_TIME_TAKEN - CNTR_NAME_SEL_647_FATAL_ERROR + CNTR_NAME_FS_619_FATAL_ERROR - SEL_647 + FS_619 - + @@ -1416,25 +234,29 @@ - - _c0 - _col0 + + + - + + + boolean + + - - _c1 - _col1 + + + @@ -1442,12 +264,12 @@ - - _c2 - _col2 + + + @@ -1455,12 +277,12 @@ - - _c3 - _col3 + + + @@ -1468,12 +290,12 @@ - - _c4 - _col4 + + + @@ -1481,12 +303,12 @@ - - _c5 - _col5 + + + @@ -1494,12 +316,12 @@ - - _c6 - _col6 + + + @@ -1507,12 +329,12 @@ - - _c7 - _col7 + + + @@ -1520,12 +342,12 @@ - - _c8 - _col8 + + + @@ -1533,12 +355,12 @@ - - _c9 - _col9 + + + @@ -1546,12 +368,12 @@ - - _c10 - _col10 + + + @@ -1559,12 +381,12 @@ - - _c11 - _col11 + + + @@ -1572,12 +394,12 @@ - - _c12 - _col12 + + + @@ -1585,25 +407,29 @@ - - _c13 - _col13 + + + - + + + string + + - - _c14 - _col14 + + + @@ -1611,12 +437,12 @@ - - _c15 - _col15 + + + @@ -1624,12 +450,12 @@ - - _c16 - _col16 + + + @@ -1643,69 +469,935 @@ - - - - + + + + _col8 + - - - key + + + - - src + + + + + + + + .* + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col7 + + + - - - int - - + - 86 + ab + + + + + + + a + + + - + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + _col6 + + + + + + + + + + ab + + + + + + + + + + _a% + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col5 + + + + + + + + + + ab + + + + + + + + + + \%\_ + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col4 + + + + + + + + + + %_ + + + + + + + + + + \%\_ + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col3 + + + + + + + + + + ab + + + + + + + + + + %a_ + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col2 + + + + + + + + + + ab + + + + + + + + + + %a% + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col1 + + + + + + + + + + b + + + + + + + + + + %a% + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + _col9 + + + + + + + + + + a + + + + + + + + + + [ab] + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col13 + + + + + + + + + + abc + + + + + + + + + + b + + + + + + + + + + c + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRegExpReplace + + + regexp_replace + + + + + + + + + + _col12 + + + + + + + + + + hadoop + + + + + + + + + + o* + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col11 + + + + + + + + + + hadoop + + + + + + + + + + [a-z]* + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col10 + + + + + + + + + + + + + + + + + + + + [ab] + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFRegExp + + + rlike + + + + + + + + + + _col16 + + + + + + + + + + hadoop + + + + + + + + + + 
(.)[a-z]* + + + + + + + + + + $1ive + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRegExpReplace + + + regexp_replace + + + + + + + + + + _col15 + + + + + + + + + + abbbb + + + + + + + + + + bb + + + + + + + + + + b + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRegExpReplace + + + regexp_replace + + + + + + + + + + _col14 + + + + + + + + + + abc + + + + + + + + + + z + + + + + + + + + + a + + + + + + + + + org.apache.hadoop.hive.ql.udf.UDFRegExpReplace + + + regexp_replace + + + + + + + + + + _col0 + + + + + + + + + + a + + + + + + + + + + %a% + + + + + + + + + true + + + org.apache.hadoop.hive.ql.udf.UDFLike + + + like + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + _col2 + + + _col3 + + + _col4 + + + _col5 + + + _col6 + + + _col7 + + + _col8 + + + _col9 + + + _col10 + + + _col11 + + + _col12 + + + _col13 + + + _col14 + + + _col15 + + + _col16 + + + + + - CNTR_NAME_FIL_646_NUM_INPUT_ROWS + CNTR_NAME_SEL_618_NUM_INPUT_ROWS - CNTR_NAME_FIL_646_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_618_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_646_TIME_TAKEN + CNTR_NAME_SEL_618_TIME_TAKEN - CNTR_NAME_FIL_646_FATAL_ERROR + CNTR_NAME_SEL_618_FATAL_ERROR - FIL_646 + SEL_618 @@ -1719,18 +1411,226 @@ - + + + _c0 + - key + _col0 - - src + + + + + + + + _c1 + + + _col1 + + + + + + + + + _c2 + + + _col2 + + + + + + + + + + _c3 + + + _col3 + + + + + + + + + + _c4 + + + _col4 + + + + + + + + + + _c5 + + + _col5 + + + + + + + + + + _c6 + + + _col6 + + + + + + + + + + _c7 + + + _col7 + + + + + + + + + + _c8 + + + _col8 + + + + + + + + + + _c9 + + + _col9 + + + + + + + + + + _c10 + + + _col10 + + + + + + + + + + _c11 + + + _col11 + + + + + + + + + + _c12 + + + _col12 + + + + + + + + + + _c13 + + + _col13 + + + + + + _c14 + + + _col14 + + + + + + + + + + _c15 + + + _col15 + + + + + + + + + + _c16 + + + _col16 + + + + + + @@ -1761,7 +1661,11 @@ - + + + int + + 86 @@ -1783,21 +1687,21 @@ - CNTR_NAME_FIL_649_NUM_INPUT_ROWS + CNTR_NAME_FIL_620_NUM_INPUT_ROWS - CNTR_NAME_FIL_649_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_620_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_649_TIME_TAKEN + CNTR_NAME_FIL_620_TIME_TAKEN - CNTR_NAME_FIL_649_FATAL_ERROR + CNTR_NAME_FIL_620_FATAL_ERROR - FIL_649 + FIL_620 @@ -1811,7 +1715,17 @@ - + + + key + + + src + + + + + @@ -1883,16 +1797,16 @@ - CNTR_NAME_TS_645_NUM_INPUT_ROWS + CNTR_NAME_TS_616_NUM_INPUT_ROWS - CNTR_NAME_TS_645_NUM_OUTPUT_ROWS + CNTR_NAME_TS_616_NUM_OUTPUT_ROWS - CNTR_NAME_TS_645_TIME_TAKEN + CNTR_NAME_TS_616_TIME_TAKEN - CNTR_NAME_TS_645_FATAL_ERROR + CNTR_NAME_TS_616_FATAL_ERROR @@ -1904,7 +1818,7 @@ - TS_645 + TS_616 @@ -1923,7 +1837,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -1935,7 +1849,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -1992,11 +1906,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060320 + 1310382399 @@ -2054,11 +1968,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060320 + 1310382399 Index: ql/src/test/results/compiler/plan/udf4.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/udf4.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/udf4.q.xml (working copy) @@ -201,21 +201,21 @@ - CNTR_NAME_FS_657_NUM_INPUT_ROWS + CNTR_NAME_FS_627_NUM_INPUT_ROWS - CNTR_NAME_FS_657_NUM_OUTPUT_ROWS + CNTR_NAME_FS_627_NUM_OUTPUT_ROWS - CNTR_NAME_FS_657_TIME_TAKEN + CNTR_NAME_FS_627_TIME_TAKEN - CNTR_NAME_FS_657_FATAL_ERROR + CNTR_NAME_FS_627_FATAL_ERROR - FS_657 + FS_627 @@ -1367,21 +1367,21 @@ - CNTR_NAME_SEL_656_NUM_INPUT_ROWS + CNTR_NAME_SEL_626_NUM_INPUT_ROWS - CNTR_NAME_SEL_656_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_626_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_656_TIME_TAKEN + CNTR_NAME_SEL_626_TIME_TAKEN - CNTR_NAME_SEL_656_FATAL_ERROR + CNTR_NAME_SEL_626_FATAL_ERROR - SEL_656 + SEL_626 @@ -1662,16 +1662,16 @@ - CNTR_NAME_TS_655_NUM_INPUT_ROWS + CNTR_NAME_TS_625_NUM_INPUT_ROWS - CNTR_NAME_TS_655_NUM_OUTPUT_ROWS + CNTR_NAME_TS_625_NUM_OUTPUT_ROWS - CNTR_NAME_TS_655_TIME_TAKEN + CNTR_NAME_TS_625_TIME_TAKEN - CNTR_NAME_TS_655_FATAL_ERROR + CNTR_NAME_TS_625_FATAL_ERROR @@ -1679,7 +1679,7 @@ - TS_655 + TS_625 Index: ql/src/test/results/compiler/plan/udf6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf6.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/udf6.q.xml (working copy) @@ -201,21 +201,21 @@ - CNTR_NAME_FS_663_NUM_INPUT_ROWS + CNTR_NAME_FS_633_NUM_INPUT_ROWS - CNTR_NAME_FS_663_NUM_OUTPUT_ROWS + CNTR_NAME_FS_633_NUM_OUTPUT_ROWS - CNTR_NAME_FS_663_TIME_TAKEN + CNTR_NAME_FS_633_TIME_TAKEN - CNTR_NAME_FS_663_FATAL_ERROR + CNTR_NAME_FS_633_FATAL_ERROR - FS_663 + FS_633 @@ -392,21 +392,21 @@ - CNTR_NAME_SEL_662_NUM_INPUT_ROWS + CNTR_NAME_SEL_632_NUM_INPUT_ROWS - CNTR_NAME_SEL_662_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_632_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_662_TIME_TAKEN + CNTR_NAME_SEL_632_TIME_TAKEN - CNTR_NAME_SEL_662_FATAL_ERROR + CNTR_NAME_SEL_632_FATAL_ERROR - SEL_662 + SEL_632 @@ -466,16 +466,16 @@ - CNTR_NAME_TS_661_NUM_INPUT_ROWS + CNTR_NAME_TS_631_NUM_INPUT_ROWS - CNTR_NAME_TS_661_NUM_OUTPUT_ROWS + CNTR_NAME_TS_631_NUM_OUTPUT_ROWS - CNTR_NAME_TS_661_TIME_TAKEN + CNTR_NAME_TS_631_TIME_TAKEN - CNTR_NAME_TS_661_FATAL_ERROR + CNTR_NAME_TS_631_FATAL_ERROR @@ -483,7 +483,7 @@ - TS_661 + TS_631 Index: ql/src/test/results/compiler/plan/udf_case.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf_case.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/udf_case.q.xml (working copy) @@ -205,21 +205,21 @@ - CNTR_NAME_FS_670_NUM_INPUT_ROWS + CNTR_NAME_FS_640_NUM_INPUT_ROWS - CNTR_NAME_FS_670_NUM_OUTPUT_ROWS + CNTR_NAME_FS_640_NUM_OUTPUT_ROWS - CNTR_NAME_FS_670_TIME_TAKEN + CNTR_NAME_FS_640_TIME_TAKEN - CNTR_NAME_FS_670_FATAL_ERROR + CNTR_NAME_FS_640_FATAL_ERROR - FS_670 + FS_640 @@ -280,21 +280,21 @@ - CNTR_NAME_LIM_669_NUM_INPUT_ROWS + CNTR_NAME_LIM_639_NUM_INPUT_ROWS - CNTR_NAME_LIM_669_NUM_OUTPUT_ROWS + CNTR_NAME_LIM_639_NUM_OUTPUT_ROWS - CNTR_NAME_LIM_669_TIME_TAKEN + CNTR_NAME_LIM_639_TIME_TAKEN - CNTR_NAME_LIM_669_FATAL_ERROR + CNTR_NAME_LIM_639_FATAL_ERROR - LIM_669 + LIM_639 @@ -512,21 +512,21 @@ - CNTR_NAME_SEL_668_NUM_INPUT_ROWS + CNTR_NAME_SEL_638_NUM_INPUT_ROWS - CNTR_NAME_SEL_668_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_638_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_668_TIME_TAKEN + CNTR_NAME_SEL_638_TIME_TAKEN - CNTR_NAME_SEL_668_FATAL_ERROR + CNTR_NAME_SEL_638_FATAL_ERROR - SEL_668 + SEL_638 @@ -559,16 +559,16 @@ - 
CNTR_NAME_TS_667_NUM_INPUT_ROWS + CNTR_NAME_TS_637_NUM_INPUT_ROWS - CNTR_NAME_TS_667_NUM_OUTPUT_ROWS + CNTR_NAME_TS_637_NUM_OUTPUT_ROWS - CNTR_NAME_TS_667_TIME_TAKEN + CNTR_NAME_TS_637_TIME_TAKEN - CNTR_NAME_TS_667_FATAL_ERROR + CNTR_NAME_TS_637_FATAL_ERROR @@ -576,7 +576,7 @@ - TS_667 + TS_637 Index: ql/src/test/results/compiler/plan/udf_when.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf_when.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/udf_when.q.xml (working copy) @@ -205,21 +205,21 @@ - CNTR_NAME_FS_678_NUM_INPUT_ROWS + CNTR_NAME_FS_648_NUM_INPUT_ROWS - CNTR_NAME_FS_678_NUM_OUTPUT_ROWS + CNTR_NAME_FS_648_NUM_OUTPUT_ROWS - CNTR_NAME_FS_678_TIME_TAKEN + CNTR_NAME_FS_648_TIME_TAKEN - CNTR_NAME_FS_678_FATAL_ERROR + CNTR_NAME_FS_648_FATAL_ERROR - FS_678 + FS_648 @@ -280,21 +280,21 @@ - CNTR_NAME_LIM_677_NUM_INPUT_ROWS + CNTR_NAME_LIM_647_NUM_INPUT_ROWS - CNTR_NAME_LIM_677_NUM_OUTPUT_ROWS + CNTR_NAME_LIM_647_NUM_OUTPUT_ROWS - CNTR_NAME_LIM_677_TIME_TAKEN + CNTR_NAME_LIM_647_TIME_TAKEN - CNTR_NAME_LIM_677_FATAL_ERROR + CNTR_NAME_LIM_647_FATAL_ERROR - LIM_677 + LIM_647 @@ -592,21 +592,21 @@ - CNTR_NAME_SEL_676_NUM_INPUT_ROWS + CNTR_NAME_SEL_646_NUM_INPUT_ROWS - CNTR_NAME_SEL_676_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_646_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_676_TIME_TAKEN + CNTR_NAME_SEL_646_TIME_TAKEN - CNTR_NAME_SEL_676_FATAL_ERROR + CNTR_NAME_SEL_646_FATAL_ERROR - SEL_676 + SEL_646 @@ -639,16 +639,16 @@ - CNTR_NAME_TS_675_NUM_INPUT_ROWS + CNTR_NAME_TS_645_NUM_INPUT_ROWS - CNTR_NAME_TS_675_NUM_OUTPUT_ROWS + CNTR_NAME_TS_645_NUM_OUTPUT_ROWS - CNTR_NAME_TS_675_TIME_TAKEN + CNTR_NAME_TS_645_TIME_TAKEN - CNTR_NAME_TS_675_FATAL_ERROR + CNTR_NAME_TS_645_FATAL_ERROR @@ -656,7 +656,7 @@ - TS_675 + TS_645 Index: ql/src/test/results/compiler/plan/union.q.xml =================================================================== --- ql/src/test/results/compiler/plan/union.q.xml (revision 1145463) +++ ql/src/test/results/compiler/plan/union.q.xml (working copy) @@ -1,5 +1,5 @@ - + @@ -41,7 +41,7 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10001 @@ -50,7 +50,7 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10000 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10000 1 @@ -92,21 +92,21 @@ - CNTR_NAME_FS_695_NUM_INPUT_ROWS + CNTR_NAME_FS_665_NUM_INPUT_ROWS - CNTR_NAME_FS_695_NUM_OUTPUT_ROWS + CNTR_NAME_FS_665_NUM_OUTPUT_ROWS - CNTR_NAME_FS_695_TIME_TAKEN + CNTR_NAME_FS_665_TIME_TAKEN - CNTR_NAME_FS_695_FATAL_ERROR + CNTR_NAME_FS_665_FATAL_ERROR - FS_695 + FS_665 @@ -160,21 +160,21 @@ - CNTR_NAME_TS_694_NUM_INPUT_ROWS + CNTR_NAME_TS_664_NUM_INPUT_ROWS - CNTR_NAME_TS_694_NUM_OUTPUT_ROWS + CNTR_NAME_TS_664_NUM_OUTPUT_ROWS - CNTR_NAME_TS_694_TIME_TAKEN + CNTR_NAME_TS_664_TIME_TAKEN - CNTR_NAME_TS_694_FATAL_ERROR + CNTR_NAME_TS_664_FATAL_ERROR - TS_694 + TS_664 @@ -195,10 +195,10 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10001 - 
file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10001 @@ -207,7 +207,7 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10001 -ext-10001 @@ -280,7 +280,7 @@ true - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10000 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10000 ../build/ql/test/data/warehouse/union.out @@ -304,10 +304,10 @@ true - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10001 - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10000 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10000 @@ -333,7 +333,7 @@ - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10001 + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10001 @@ -419,11 +419,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060370 + 1310382416 @@ -481,11 +481,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060370 + 1310382416 @@ -551,11 +551,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060370 + 1310382416 @@ -613,11 +613,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060370 + 1310382416 @@ -641,620 +641,516 @@ - + - + - + - - - - - - - - - 1 - - - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10001 - - - 1 - - - file:/data/users/sdong/www/open-source-hive1/build/ql/scratchdir/hive_2011-04-28_23-59-33_025_642210603550334429/-ext-10000/ - - - - - - 1 - - - - - - - CNTR_NAME_FS_691_NUM_INPUT_ROWS - - - CNTR_NAME_FS_691_NUM_OUTPUT_ROWS - - - CNTR_NAME_FS_691_TIME_TAKEN - - - CNTR_NAME_FS_691_FATAL_ERROR - - - - - FS_691 - - - - - - - - - - - - + + + + + 1 - - - - - - _col1 - - - _col1 - - - src - - - - - + + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10001 - - _col0 - - - _col0 - - - src - - - - - + + 1 - - - - - - - - - - - - - + + file:/home/amarsri/workspace/hive/build/ql/scratchdir/hive_2011-07-11_04-06-57_227_5273764780066863405/-ext-10000/ - - - - _col0 - - - _col1 - - + + - - true + + 1 - CNTR_NAME_SEL_690_NUM_INPUT_ROWS + CNTR_NAME_FS_661_NUM_INPUT_ROWS - 
CNTR_NAME_SEL_690_NUM_OUTPUT_ROWS + CNTR_NAME_FS_661_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_690_TIME_TAKEN + CNTR_NAME_FS_661_TIME_TAKEN - CNTR_NAME_SEL_690_FATAL_ERROR + CNTR_NAME_FS_661_FATAL_ERROR - SEL_690 + FS_661 - + - - - - - - - key - - - _col0 - - - src - - - - - - - - - - value - - - _col1 - - - src - - - - - - - - - + + + + + _col1 + + + _col1 + + + src + + + + + + + + _col0 + + + _col0 + + + src + + + + + + + + - + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + - CNTR_NAME_UNION_689_NUM_INPUT_ROWS + CNTR_NAME_SEL_660_NUM_INPUT_ROWS - CNTR_NAME_UNION_689_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_660_NUM_OUTPUT_ROWS - CNTR_NAME_UNION_689_TIME_TAKEN + CNTR_NAME_SEL_660_TIME_TAKEN - CNTR_NAME_UNION_689_FATAL_ERROR + CNTR_NAME_SEL_660_FATAL_ERROR - UNION_689 + SEL_660 - + - - - - - - + + + + + + + + + + key + + _col0 + + + src + + + + - - - + + + + value + + _col1 - - - value - - - src - - - - - - - _col0 - - - key - - - src - - - - - + + src + + + + + + + + + + + + + + + + + + CNTR_NAME_UNION_659_NUM_INPUT_ROWS + + + CNTR_NAME_UNION_659_NUM_OUTPUT_ROWS + + + CNTR_NAME_UNION_659_TIME_TAKEN + + + CNTR_NAME_UNION_659_FATAL_ERROR + + + + + UNION_659 + + + + + + + + + + + + + + + + + + + _col1 + + + value + + + src + + + + + + + + _col0 + + + key + + + src + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + true + + + + + + + CNTR_NAME_SEL_658_NUM_INPUT_ROWS + + + CNTR_NAME_SEL_658_NUM_OUTPUT_ROWS + + + CNTR_NAME_SEL_658_TIME_TAKEN + + + CNTR_NAME_SEL_658_FATAL_ERROR + + + + + SEL_658 + + + + + + + + + + + + - - - - - + + + + + + + + + key + + + src + + + + + + + + + + + + int + + + + + 100 + + + + - - + + - - - - - - _col0 + + + + boolean + + - - _col1 - - - true - - CNTR_NAME_SEL_688_NUM_INPUT_ROWS + CNTR_NAME_FIL_663_NUM_INPUT_ROWS - CNTR_NAME_SEL_688_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_663_NUM_OUTPUT_ROWS - CNTR_NAME_SEL_688_TIME_TAKEN + CNTR_NAME_FIL_663_TIME_TAKEN - CNTR_NAME_SEL_688_FATAL_ERROR + CNTR_NAME_FIL_663_FATAL_ERROR - SEL_688 + FIL_663 - + - + - - - - - - - - - key - - - src - - - - - - - - - - - - int - - - - - 100 - - - - - - - - - - - - boolean - - - - + + + src + + + - CNTR_NAME_FIL_687_NUM_INPUT_ROWS + CNTR_NAME_TS_656_NUM_INPUT_ROWS - CNTR_NAME_FIL_687_NUM_OUTPUT_ROWS + CNTR_NAME_TS_656_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_687_TIME_TAKEN + CNTR_NAME_TS_656_TIME_TAKEN - CNTR_NAME_FIL_687_FATAL_ERROR + CNTR_NAME_TS_656_FATAL_ERROR - - FIL_687 - - + - - - - - + 0 + + + 1 + + + + + TS_656 + + + + + + + + + key - - - - - - - - - - - - key - - - src - - - - - - - - - - - - - 100 - - - - - - - - - - - - + + src + + + - - - - CNTR_NAME_FIL_693_NUM_INPUT_ROWS + + + + value - - CNTR_NAME_FIL_693_NUM_OUTPUT_ROWS + + src - - CNTR_NAME_FIL_693_TIME_TAKEN + + - - CNTR_NAME_FIL_693_FATAL_ERROR - - - FIL_693 - - - - - - - - - - - + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + src + + + + + bigint - - - - src - - - - - - - - - - CNTR_NAME_TS_686_NUM_INPUT_ROWS - - - CNTR_NAME_TS_686_NUM_OUTPUT_ROWS - - - CNTR_NAME_TS_686_TIME_TAKEN - - - CNTR_NAME_TS_686_FATAL_ERROR - - - - - - - 0 - - - 1 - - - - - TS_686 - - - - - - - - - key - - - src - - - - - - - - - - value - - - src - - - - - - - - - - true - - - BLOCK__OFFSET__INSIDE__FILE - - - src - - - - - bigint - - - - - - - - - true - - - INPUT__FILE__NAME - - - src - - - - - - - - - - - - - - + + + + true + + INPUT__FILE__NAME + + + src + + + + - - - - - - - - - - - - - - @@ -1262,34 +1158,7 @@ - - - - - _col0 - - - src - - - - - - - - - - _col1 - - - src - - - - - - - + @@ -1302,7 +1171,7 @@ - + _col0 @@ 
-1315,7 +1184,7 @@ - + _col1 @@ -1335,143 +1204,96 @@ - - - - _col1 - - - value - - - src - - - - - - - - _col0 - - - key - - - src - - - - - - - - - - - + + + - + + + _col0 + + + src + + + + + - + + + _col1 + + + src + + + + + - - - - _col0 - - - _col1 - - - - - - - CNTR_NAME_SEL_685_NUM_INPUT_ROWS - - - CNTR_NAME_SEL_685_NUM_OUTPUT_ROWS - - - CNTR_NAME_SEL_685_TIME_TAKEN - - - CNTR_NAME_SEL_685_FATAL_ERROR - - + + + + + + + + _col1 + + + value - - SEL_685 + + src - - - - - - + + - - - - - - - - - - - - - + + + + _col0 + + + key + + src + + + + - - - - - - - - - key - - - src - - - - - - - - - - - - - 100 - - - - + + + + + - - + + - - + + + + + + _col0 + + _col1 + @@ -1479,21 +1301,21 @@ - CNTR_NAME_FIL_684_NUM_INPUT_ROWS + CNTR_NAME_SEL_655_NUM_INPUT_ROWS - CNTR_NAME_FIL_684_NUM_OUTPUT_ROWS + CNTR_NAME_SEL_655_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_684_TIME_TAKEN + CNTR_NAME_SEL_655_TIME_TAKEN - CNTR_NAME_FIL_684_FATAL_ERROR + CNTR_NAME_SEL_655_FATAL_ERROR - FIL_684 + SEL_655 @@ -1507,30 +1329,10 @@ - - - key - - - src - - - - - + - - - value - - - src - - - - - + @@ -1584,21 +1386,21 @@ - CNTR_NAME_FIL_692_NUM_INPUT_ROWS + CNTR_NAME_FIL_662_NUM_INPUT_ROWS - CNTR_NAME_FIL_692_NUM_OUTPUT_ROWS + CNTR_NAME_FIL_662_NUM_OUTPUT_ROWS - CNTR_NAME_FIL_692_TIME_TAKEN + CNTR_NAME_FIL_662_TIME_TAKEN - CNTR_NAME_FIL_692_FATAL_ERROR + CNTR_NAME_FIL_662_FATAL_ERROR - FIL_692 + FIL_662 @@ -1612,10 +1414,30 @@ - + + + key + + + src + + + + + - + + + value + + + src + + + + + @@ -1670,16 +1492,16 @@ - CNTR_NAME_TS_683_NUM_INPUT_ROWS + CNTR_NAME_TS_653_NUM_INPUT_ROWS - CNTR_NAME_TS_683_NUM_OUTPUT_ROWS + CNTR_NAME_TS_653_NUM_OUTPUT_ROWS - CNTR_NAME_TS_683_TIME_TAKEN + CNTR_NAME_TS_653_TIME_TAKEN - CNTR_NAME_TS_683_FATAL_ERROR + CNTR_NAME_TS_653_FATAL_ERROR @@ -1694,7 +1516,7 @@ - TS_683 + TS_653 @@ -1717,7 +1539,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src null-subquery1:unioninput-subquery1:src @@ -1732,7 +1554,7 @@ - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src src @@ -1789,11 +1611,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060370 + 1310382416 @@ -1851,11 +1673,11 @@ location - pfile:/data/users/sdong/www/open-source-hive1/build/ql/test/data/warehouse/src + pfile:/home/amarsri/workspace/hive/build/ql/test/data/warehouse/src transient_lastDdlTime - 1304060370 + 1310382416
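The regenerated union.q.xml plan above replaces the old operator chain, which carried two FilterOperator instances per union branch (FIL_687/FIL_693 and FIL_684/FIL_692), with a single filter per branch (FIL_663 and FIL_662), renumbering the downstream operators and their CNTR_NAME_* counters accordingly. As a rough orientation aid, the statement below is a hedged sketch of the kind of query this plan corresponds to; it is reconstructed only from attributes visible in the hunks (table src, predicate key < 100, a UNION, and the union.out destination) and is not the literal test script. The second branch's predicate and the exact column list are assumptions.

    EXPLAIN
    FROM (
      SELECT src.key, src.value FROM src WHERE src.key < 100   -- predicate visible in the plan above
      UNION ALL
      SELECT src.key, src.value FROM src WHERE src.key > 100   -- assumed complementary predicate
    ) unioninput
    INSERT OVERWRITE DIRECTORY '../build/ql/test/data/warehouse/union.out'
    SELECT unioninput.*;

Running EXPLAIN on a build that includes this patch should show one Filter Operator per union branch, matching the single FIL_662/FIL_663 operators in the regenerated plan, where the previous golden files showed a duplicated filter pair in each branch.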