diff --git ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
index 58ed550..cb010fb 100644
--- ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
+++ ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java
@@ -243,31 +243,28 @@ static public String getOperatorName() {
     return "TS";
   }
 
-  // This 'neededColumnIDs' field is included in this operator class instead of
-  // its desc class.The reason is that 1)tableScanDesc can not be instantiated,
-  // and 2) it will fail some join and union queries if this is added forcibly
-  // into tableScanDesc.
-  // Both neededColumnIDs and neededColumns should never be null.
-  // When neededColumnIDs is an empty list,
-  // it means no needed column (e.g. we do not need any column to evaluate
-  // SELECT count(*) FROM t).
-  List<Integer> neededColumnIDs;
-  List<String> neededColumns;
-
   public void setNeededColumnIDs(List<Integer> orign_columns) {
-    neededColumnIDs = orign_columns;
+    conf.setNeededColumnIDs(orign_columns);
   }
 
   public List<Integer> getNeededColumnIDs() {
-    return neededColumnIDs;
+    return conf.getNeededColumnIDs();
   }
 
   public void setNeededColumns(List<String> columnNames) {
-    neededColumns = columnNames;
+    conf.setNeededColumns(columnNames);
   }
 
   public List<String> getNeededColumns() {
-    return neededColumns;
+    return conf.getNeededColumns();
+  }
+
+  public void setReferencedColumns(List<String> referencedColumns) {
+    conf.setReferencedColumns(referencedColumns);
+  }
+
+  public List<String> getReferencedColumns() {
+    return conf.getReferencedColumns();
   }
 
   @Override
@@ -335,6 +332,7 @@ public boolean supportAutomaticSortMergeJoin() {
     TableScanOperator ts = (TableScanOperator) super.clone();
     ts.setNeededColumnIDs(new ArrayList<Integer>(getNeededColumnIDs()));
     ts.setNeededColumns(new ArrayList<String>(getNeededColumns()));
+    ts.setReferencedColumns(new ArrayList<String>(getReferencedColumns()));
     return ts;
   }
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
index 6a4dc9b..f1ebd99 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
@@ -312,6 +312,7 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
         cols);
     List<Integer> neededColumnIds = new ArrayList<Integer>();
     List<String> neededColumnNames = new ArrayList<String>();
+    List<String> referencedColumnNames = new ArrayList<String>();
     RowResolver inputRR = cppCtx.getOpToParseCtxMap().get(scanOp).getRowResolver();
     TableScanDesc desc = scanOp.getConf();
     List<VirtualColumn> virtualCols = desc.getVirtualCols();
@@ -322,11 +323,12 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
       cols.add(VirtualColumn.RAWDATASIZE.getName());
     }
 
-    for (int i = 0; i < cols.size(); i++) {
-      String[] tabCol = inputRR.reverseLookup(cols.get(i));
-      if(tabCol == null) {
+    for (String column : cols) {
+      String[] tabCol = inputRR.reverseLookup(column);
+      if (tabCol == null) {
        continue;
      }
+      referencedColumnNames.add(column);
      ColumnInfo colInfo = inputRR.get(tabCol[0], tabCol[1]);
      if (colInfo.getIsVirtualCol()) {
        // part is also a virtual column, but part col should not in this
@@ -340,17 +342,18 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx ctx,
        //no need to pass virtual columns to reader.
        continue;
      }
-      int position = inputRR.getPosition(cols.get(i));
+      int position = inputRR.getPosition(column);
      if (position >= 0) {
        // get the needed columns by id and name
        neededColumnIds.add(position);
-        neededColumnNames.add(cols.get(i));
+        neededColumnNames.add(column);
      }
    }
 
    desc.setVirtualCols(newVirtualCols);
    scanOp.setNeededColumnIDs(neededColumnIds);
    scanOp.setNeededColumns(neededColumnNames);
+    scanOp.setReferencedColumns(referencedColumnNames);
    return null;
  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
index 8c4b891..7f574dc 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMRTableScan1.java
@@ -147,7 +147,8 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx opProcCtx,
             .getConfirmedPartitionsForScan(parseInfo);
         if (confirmedPartns.size() > 0) {
           Table source = parseCtx.getQB().getMetaData().getTableForAlias(alias);
-          PrunedPartitionList partList = new PrunedPartitionList(source, confirmedPartns, false);
+          List<String> partCols = GenMapRedUtils.getPartitionColumns(parseInfo);
+          PrunedPartitionList partList = new PrunedPartitionList(source, confirmedPartns, partCols, false);
           GenMapRedUtils.setTaskPlan(currAliasId, currTopOp, currTask, false, ctx, partList);
         } else { // non-partitioned table
           GenMapRedUtils.setTaskPlan(currAliasId, currTopOp, currTask, false, ctx);
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
index f285312..77f56c1 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/GenMapRedUtils.java
@@ -20,6 +20,7 @@
 import java.io.Serializable;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -927,6 +928,7 @@ public static TableScanOperator createTemporaryTableScanOperator(RowSchema rowSc
     }
     tableScanOp.setNeededColumnIDs(neededColumnIds);
     tableScanOp.setNeededColumns(neededColumnNames);
+    tableScanOp.setReferencedColumns(neededColumnNames);
     return tableScanOp;
   }
 
@@ -1747,6 +1749,14 @@ public static Path createMoveTask(Task currTask, boolean
     return confirmedPartns;
   }
 
+  public static List<String> getPartitionColumns(QBParseInfo parseInfo) {
+    tableSpec tblSpec = parseInfo.getTableSpec();
+    if (tblSpec.tableHandle.isPartitioned()) {
+      return new ArrayList<String>(tblSpec.getPartSpec().keySet());
+    }
+    return Collections.emptyList();
+  }
+
   public static List getInputPathsForPartialScan(QBParseInfo parseInfo,
       StringBuffer aggregationKey) throws SemanticException {
     List inputPaths = new ArrayList();
diff --git ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
index 6bdf394..3c26894 100644
--- ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
+++ ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
@@ -212,9 +212,9 @@ static private ExprNodeDesc compactExpr(ExprNodeDesc expr) {
         return isAnd ?
            children.get(0) : null;
      }
    }
-    return (ExprNodeGenericFuncDesc)expr;
+    return expr;
  } else {
-    throw new IllegalStateException("Unexpected type of ExprNodeDesc: " + expr.getExprString());
+    throw new IllegalStateException("Unexpected type of ExprNodeDesc: " + expr.getExprString());
  }
 }
@@ -225,18 +225,23 @@ static private ExprNodeDesc compactExpr(ExprNodeDesc expr) {
   * The expression is only used to prune by partition name, so we have no business with VCs.
   * @param expr original partition pruning expression.
   * @param partCols list of partition columns for the table.
+   * @param referred partition columns referred by expr
   * @return partition pruning expression that only contains partition columns from the list.
   */
-  static private ExprNodeDesc removeNonPartCols(ExprNodeDesc expr, List<String> partCols) {
-    if (expr instanceof ExprNodeColumnDesc
-        && !partCols.contains(((ExprNodeColumnDesc) expr).getColumn())) {
-      // Column doesn't appear to be a partition column for the table.
-      return new ExprNodeConstantDesc(expr.getTypeInfo(), null);
+  static private ExprNodeDesc removeNonPartCols(ExprNodeDesc expr, List<String> partCols,
+      Set<String> referred) {
+    if (expr instanceof ExprNodeColumnDesc) {
+      String column = ((ExprNodeColumnDesc) expr).getColumn();
+      if (!partCols.contains(column)) {
+        // Column doesn't appear to be a partition column for the table.
+        return new ExprNodeConstantDesc(expr.getTypeInfo(), null);
+      }
+      referred.add(column);
    }
    if (expr instanceof ExprNodeGenericFuncDesc) {
      List<ExprNodeDesc> children = expr.getChildren();
      for (int i = 0; i < children.size(); ++i) {
-        children.set(i, removeNonPartCols(children.get(i), partCols));
+        children.set(i, removeNonPartCols(children.get(i), partCols, referred));
      }
    }
    return expr;
  }
@@ -266,7 +271,7 @@ private static PrunedPartitionList getPartitionsFromServer(Table tab,
     try {
       if (!tab.isPartitioned()) {
         // If the table is not partitioned, return everything.
-        return new PrunedPartitionList(tab, getAllPartitions(tab), false);
+        return new PrunedPartitionList(tab, getAllPartitions(tab), null, false);
       }
 
       LOG.debug("tabname = " + tab.getTableName() + " is partitioned");
@@ -279,18 +284,19 @@ private static PrunedPartitionList getPartitionsFromServer(Table tab,
       if (prunerExpr == null) {
         // Non-strict mode, and there is no predicates at all - get everything.
-        return new PrunedPartitionList(tab, getAllPartitions(tab), false);
+        return new PrunedPartitionList(tab, getAllPartitions(tab), null, false);
       }
 
+      Set<String> referred = new LinkedHashSet<String>();
       // Replace virtual columns with nulls. See javadoc for details.
-      prunerExpr = removeNonPartCols(prunerExpr, extractPartColNames(tab));
+      prunerExpr = removeNonPartCols(prunerExpr, extractPartColNames(tab), referred);
       // Remove all parts that are not partition columns. See javadoc for details.
       ExprNodeGenericFuncDesc compactExpr = (ExprNodeGenericFuncDesc)compactExpr(prunerExpr.clone());
       String oldFilter = prunerExpr.getExprString();
 
       if (compactExpr == null) {
         // Non-strict mode, and all the predicates are on non-partition columns - get everything.
         LOG.debug("Filter " + oldFilter + " was null after compacting");
-        return new PrunedPartitionList(tab, getAllPartitions(tab), true);
+        return new PrunedPartitionList(tab, getAllPartitions(tab), null, true);
       }
 
       LOG.debug("Filter w/ compacting: " + compactExpr.getExprString()
@@ -326,6 +332,7 @@ private static PrunedPartitionList getPartitionsFromServer(Table tab,
       // metastore and so some partitions may have no data based on other filters.
       boolean isPruningByExactFilter = oldFilter.equals(compactExpr.getExprString());
       return new PrunedPartitionList(tab, new LinkedHashSet<Partition>(partitions),
+          new ArrayList<String>(referred),
           hasUnknownPartitions || !isPruningByExactFilter);
     } catch (HiveException e) {
       throw e;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java
index 74b595a..5e04806 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnAccessAnalyzer.java
@@ -22,8 +22,8 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 
 public class ColumnAccessAnalyzer {
@@ -44,9 +44,23 @@ public ColumnAccessInfo analyzeColumnAccess() throws SemanticException {
     for (TableScanOperator op : topOps.keySet()) {
       Table table = topOps.get(op);
       String tableName = table.getCompleteName();
-      List<FieldSchema> tableCols = table.getCols();
-      for (int i : op.getNeededColumnIDs()) {
-        columnAccessInfo.add(tableName, tableCols.get(i).getName());
+      List<String> referenced = op.getReferencedColumns();
+      for (String column : referenced) {
+        columnAccessInfo.add(tableName, column);
+      }
+      if (table.isPartitioned()) {
+        PrunedPartitionList parts;
+        try {
+          parts = pGraphContext.getPrunedPartitions(table.getTableName(), op);
+        } catch (HiveException e) {
+          LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
+          throw new SemanticException(e.getMessage(), e);
+        }
+        if (parts.getReferredPartCols() != null) {
+          for (String partKey : parts.getReferredPartCols()) {
+            columnAccessInfo.add(tableName, partKey);
+          }
+        }
       }
     }
     return columnAccessInfo;
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
index c26be3c..9fcc1b2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/ProcessAnalyzeTable.java
@@ -28,7 +28,6 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
@@ -40,12 +39,6 @@
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec;
-import org.apache.hadoop.hive.ql.parse.GenTezWork;
-import org.apache.hadoop.hive.ql.parse.ParseContext;
-import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
-import org.apache.hadoop.hive.ql.parse.QBParseInfo;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.StatsNoJobWork;
 import org.apache.hadoop.hive.ql.plan.TezWork;
@@ -150,7 +143,8 @@ public Object process(Node nd, Stack<Node> stack,
       PrunedPartitionList partitions = null;
       if (confirmedPartns.size() > 0) {
         Table source = queryBlock.getMetaData().getTableForAlias(alias);
-        partitions = new PrunedPartitionList(source, confirmedPartns, false);
+        List<String> partCols = GenMapRedUtils.getPartitionColumns(parseInfo);
+        partitions = new PrunedPartitionList(source, confirmedPartns, partCols, false);
       }
 
       MapWork w = utils.createMapWork(context, tableScan, tezWork, partitions);
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java
index d3268dd..da2e1e2 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/PrunedPartitionList.java
@@ -36,11 +36,16 @@
   /** Partitions that either satisfy the partition criteria, or may satisfy it. */
   private Set<Partition> partitions;
 
+  /** partition columns referred by pruner expr */
+  private List<String> referred;
+
   /** Whether there are partitions in the list that may or may not satisfy the criteria. */
   private boolean hasUnknowns;
 
-  public PrunedPartitionList(Table source, Set<Partition> partitions, boolean hasUnknowns) {
+  public PrunedPartitionList(Table source, Set<Partition> partitions, List<String> referred,
+      boolean hasUnknowns) {
     this.source = source;
+    this.referred = referred;
     this.partitions = partitions;
     this.hasUnknowns = hasUnknowns;
   }
@@ -70,4 +75,8 @@ public Table getSourceTable() {
   public boolean hasUnknownPartitions() {
     return hasUnknowns;
   }
+
+  public List<String> getReferredPartCols() {
+    return referred;
+  }
 }
diff --git ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
index a7cec5d..a4ba4bd 100644
--- ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
+++ ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
@@ -531,7 +531,7 @@ public tableSpec getTableSpec(String tName) {
   }
 
   /**
-   * This method is used only for the anlayze command to get the partition specs
+   * This method is used only for the analyze command to get the partition specs
   */
   public tableSpec getTableSpec() {
 
diff --git ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
index 1642d6f..4d8132a 100644
--- ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
+++ ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
@@ -23,6 +23,7 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.hive.ql.exec.PTFUtils;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 
 /**
@@ -34,6 +35,11 @@
 public class TableScanDesc extends AbstractOperatorDesc {
   private static final long serialVersionUID = 1L;
 
+  static {
+    PTFUtils.makeTransient(TableScanDesc.class,
+        "filterObject", "neededColumnIDs", "neededColumns", "referencedColumns");
+  }
+
   private String alias;
 
   private List<VirtualColumn> virtualCols;
@@ -64,6 +70,20 @@
   private ExprNodeGenericFuncDesc filterExpr;
   private transient Serializable filterObject;
 
+  // This 'neededColumnIDs' field is included in this operator class instead of
+  // its desc class.The reason is that 1)tableScanDesc can not be instantiated,
+  // and 2) it will fail some join and union queries if this is added forcibly
+  // into tableScanDesc.
+  // Both neededColumnIDs and neededColumns should never be null.
+  // When neededColumnIDs is an empty list,
+  // it means no needed column (e.g. we do not need any column to evaluate
+  // SELECT count(*) FROM t).
+  transient List<Integer> neededColumnIDs;
+  transient List<String> neededColumns;
+
+  // all column names referenced including virtual columns. used in ColumnAccessAnalyzer
+  transient List<String> referencedColumns;
+
   public static final String FILTER_EXPR_CONF_STR =
       "hive.io.filter.expr.serialized";
 
@@ -125,6 +145,30 @@ public void setFilterObject(Serializable filterObject) {
     this.filterObject = filterObject;
   }
 
+  public void setNeededColumnIDs(List<Integer> orign_columns) {
+    neededColumnIDs = orign_columns;
+  }
+
+  public List<Integer> getNeededColumnIDs() {
+    return neededColumnIDs;
+  }
+
+  public void setNeededColumns(List<String> columnNames) {
+    neededColumns = columnNames;
+  }
+
+  public List<String> getNeededColumns() {
+    return neededColumns;
+  }
+
+  public void setReferencedColumns(List<String> referencedColumns) {
+    this.referencedColumns = referencedColumns;
+  }
+
+  public List<String> getReferencedColumns() {
+    return referencedColumns;
+  }
+
   public void setAlias(String alias) {
     this.alias = alias;
   }
diff --git ql/src/test/queries/clientpositive/column_access_stats.q ql/src/test/queries/clientpositive/column_access_stats.q
index fbf8bba..4f43403 100644
--- ql/src/test/queries/clientpositive/column_access_stats.q
+++ ql/src/test/queries/clientpositive/column_access_stats.q
@@ -160,3 +160,8 @@ FROM
 JOIN T3 ON T3.key = T4.key
 ORDER BY T3.key, T4.key;
 
+
+-- for partitioned table
+SELECT * FROM srcpart TABLESAMPLE (10 ROWS);
+SELECT key,ds FROM srcpart TABLESAMPLE (10 ROWS) WHERE hr='11';
+SELECT value FROM srcpart TABLESAMPLE (10 ROWS) WHERE ds='2008-04-08';
diff --git ql/src/test/results/clientpositive/column_access_stats.q.out ql/src/test/results/clientpositive/column_access_stats.q.out
index d0cd195..ddb9d13 100644
--- ql/src/test/results/clientpositive/column_access_stats.q.out
+++ ql/src/test/results/clientpositive/column_access_stats.q.out
@@ -58,21 +58,21 @@ PREHOOK: type: QUERY
 PREHOOK: Input: default@t4
 #### A masked pattern was here ####
 Table:default@t4
-Columns:key,val
+Columns:key,p,val
 
 PREHOOK: query: SELECT val FROM T4 where p=1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t4
 #### A masked pattern was here ####
 Table:default@t4
-Columns:val
+Columns:p,val
 
 PREHOOK: query: SELECT p, val FROM T4 where p=1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@t4
 #### A masked pattern was here ####
 Table:default@t4
-Columns:val
+Columns:p,val
 
 PREHOOK: query: -- More complicated select queries
 EXPLAIN SELECT key FROM (SELECT key, val FROM T1) subq1 ORDER BY key
@@ -950,3 +950,63 @@ Columns:key,val
 7 7 17.0
 8 8 46.0
 8 8 46.0
+PREHOOK: query: -- for partitioned table
+SELECT * FROM srcpart TABLESAMPLE (10 ROWS)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=12
+#### A masked pattern was here ####
+Table:default@srcpart
+Columns:ds,hr,key,value
+
+238 val_238 2008-04-08 11
+86 val_86 2008-04-08 11
+311 val_311 2008-04-08 11
+27 val_27 2008-04-08 11
+165 val_165 2008-04-08 11
+409 val_409 2008-04-08 11
+255 val_255 2008-04-08 11
+278 val_278 2008-04-08 11
+98 val_98 2008-04-08 11
+484 val_484 2008-04-08 11
+PREHOOK: query: SELECT key,ds FROM srcpart TABLESAMPLE (10 ROWS) WHERE hr='11'
+PREHOOK: type: QUERY
+PREHOOK: Input: default@srcpart
+PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11
+PREHOOK: Input: default@srcpart@ds=2008-04-09/hr=11
+#### A masked pattern was here ####
+Table:default@srcpart
+Columns:ds,hr,key
+
+238 2008-04-08
+86 2008-04-08
+311 2008-04-08
+27 2008-04-08
+165 2008-04-08
+409 2008-04-08
+255 2008-04-08
+278 2008-04-08
+98 2008-04-08
+484
2008-04-08 +PREHOOK: query: SELECT value FROM srcpart TABLESAMPLE (10 ROWS) WHERE ds='2008-04-08' +PREHOOK: type: QUERY +PREHOOK: Input: default@srcpart +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Input: default@srcpart@ds=2008-04-08/hr=12 +#### A masked pattern was here #### +Table:default@srcpart +Columns:ds,value + +val_238 +val_86 +val_311 +val_27 +val_165 +val_409 +val_255 +val_278 +val_98 +val_484 diff --git ql/src/test/results/compiler/plan/case_sensitivity.q.xml ql/src/test/results/compiler/plan/case_sensitivity.q.xml index 7de7b4f..5020198 100644 --- ql/src/test/results/compiler/plan/case_sensitivity.q.xml +++ ql/src/test/results/compiler/plan/case_sensitivity.q.xml @@ -232,7 +232,7 @@ - + key @@ -244,6 +244,9 @@ TS_5 + + + @@ -1080,6 +1083,16 @@ TS_0 + + + + lint + + + lintstring + + + diff --git ql/src/test/results/compiler/plan/cast1.q.xml ql/src/test/results/compiler/plan/cast1.q.xml index 41d3e7c..e221210 100644 --- ql/src/test/results/compiler/plan/cast1.q.xml +++ ql/src/test/results/compiler/plan/cast1.q.xml @@ -952,6 +952,13 @@ TS_0 + + + + key + + + diff --git ql/src/test/results/compiler/plan/groupby1.q.xml ql/src/test/results/compiler/plan/groupby1.q.xml index 284f0a3..45c9180 100755 --- ql/src/test/results/compiler/plan/groupby1.q.xml +++ ql/src/test/results/compiler/plan/groupby1.q.xml @@ -891,6 +891,16 @@ TS_0 + + + + key + + + value + + + diff --git ql/src/test/results/compiler/plan/groupby2.q.xml ql/src/test/results/compiler/plan/groupby2.q.xml index c8a178d..54f3be1 100755 --- ql/src/test/results/compiler/plan/groupby2.q.xml +++ ql/src/test/results/compiler/plan/groupby2.q.xml @@ -982,6 +982,16 @@ TS_0 + + + + key + + + value + + + diff --git ql/src/test/results/compiler/plan/groupby3.q.xml ql/src/test/results/compiler/plan/groupby3.q.xml index b88aa68..3ca53f7 100644 --- ql/src/test/results/compiler/plan/groupby3.q.xml +++ ql/src/test/results/compiler/plan/groupby3.q.xml @@ -1190,6 +1190,13 @@ TS_0 + + + + value + + + diff --git ql/src/test/results/compiler/plan/groupby4.q.xml ql/src/test/results/compiler/plan/groupby4.q.xml index cb1a99b..d0ae369 100644 --- ql/src/test/results/compiler/plan/groupby4.q.xml +++ ql/src/test/results/compiler/plan/groupby4.q.xml @@ -624,6 +624,13 @@ TS_0 + + + + key + + + diff --git ql/src/test/results/compiler/plan/groupby5.q.xml ql/src/test/results/compiler/plan/groupby5.q.xml index 2fb8f20..4322904 100644 --- ql/src/test/results/compiler/plan/groupby5.q.xml +++ ql/src/test/results/compiler/plan/groupby5.q.xml @@ -743,6 +743,16 @@ TS_0 + + + + key + + + value + + + diff --git ql/src/test/results/compiler/plan/groupby6.q.xml ql/src/test/results/compiler/plan/groupby6.q.xml index c2d9a36..fc6423b 100644 --- ql/src/test/results/compiler/plan/groupby6.q.xml +++ ql/src/test/results/compiler/plan/groupby6.q.xml @@ -624,6 +624,13 @@ TS_0 + + + + value + + + diff --git ql/src/test/results/compiler/plan/input1.q.xml ql/src/test/results/compiler/plan/input1.q.xml index 2cf7a1d..b559574 100755 --- ql/src/test/results/compiler/plan/input1.q.xml +++ ql/src/test/results/compiler/plan/input1.q.xml @@ -232,7 +232,7 @@ - + key @@ -244,6 +244,9 @@ TS_5 + + + @@ -1017,6 +1020,16 @@ TS_0 + + + + key + + + value + + + diff --git ql/src/test/results/compiler/plan/input2.q.xml ql/src/test/results/compiler/plan/input2.q.xml index 7ecd903..116adee 100755 --- ql/src/test/results/compiler/plan/input2.q.xml +++ ql/src/test/results/compiler/plan/input2.q.xml @@ -232,7 +232,7 @@ - + key @@ -244,6 +244,9 @@ TS_10 + + + @@ -560,7 +563,7 @@ 
Stage-15 - + @@ -777,7 +780,7 @@ - + key @@ -789,6 +792,9 @@ TS_12 + + + @@ -1020,7 +1026,7 @@ - + @@ -1046,7 +1052,7 @@ Stage-21 - + @@ -1271,7 +1277,7 @@ - + key @@ -1283,6 +1289,9 @@ TS_14 + + + @@ -1527,7 +1536,7 @@ - + @@ -2524,6 +2533,16 @@ TS_0 + + + + key + + + value + + + diff --git ql/src/test/results/compiler/plan/input20.q.xml ql/src/test/results/compiler/plan/input20.q.xml index 7915f38..0543f32 100644 --- ql/src/test/results/compiler/plan/input20.q.xml +++ ql/src/test/results/compiler/plan/input20.q.xml @@ -1,1388 +1,1395 @@ - -#### A masked pattern was here #### - - - Stage-1 - - - - - true - - - - - tmap:src - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - - - - name - default.src - - - columns.types - string:string - - - serialization.ddl - struct src { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - columns.comments - defaultdefault - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - file.outputformat - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - location - #### A masked pattern was here #### - - - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - name - default.src - - - numFiles - 1 - - - columns.types - string:string - - - serialization.ddl - struct src { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - rawDataSize - 0 - - - columns.comments - defaultdefault - - - numRows - 0 - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - COLUMN_STATS_ACCURATE - true - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - totalSize - 5812 - - - file.outputformat - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - location - #### A masked pattern was here #### - - - transient_lastDdlTime - #### A masked pattern was here #### - - - - - - - - - - - tmap:src - - - - - - - - - - - - - - _col1 - - - _col1 - - - - - string - - - - - - - _col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - reducesinkkey0 - - - serialization.lib - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - serialization.sort.order - + - - - columns.types - string - - - - - - - -1 - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - _col0 - - - - - - - - - - -1 - - - - - - - - - - - - - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - RS_3 - - - - - - - - - - - - - - key - - - _col0 - - - - - - string - - - - - - - value - - - _col1 - - - - - - string - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.ql.exec.TextRecordReader - - - org.apache.hadoop.hive.ql.exec.TextRecordWriter - - - org.apache.hadoop.hive.ql.exec.TextRecordReader - - - cat - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - 
columns - KEY - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - serialization.format - 9 - - - - - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - columns - _col0,_col1 - - - serialization.lib - org.apache.hadoop.hive.serde2.DelimitedJSONSerDe - - - serialization.format - 9 - - - columns.types - double,double - - - - - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - columns - _col0,_col1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - serialization.format - 9 - - - columns.types - string,string - - - serialization.last.column.takes.rest - true - - - - - - - - - SCR_2 - - - - - - - - - - - - - - - - - - - - _col1 - - - - - - - key - - - src - - - - - - - - - - - - int - - - - - 5 - - - - - - - - - false - - - - - - - double - - - - - - - _col0 - - - - - - - key - - - src - - - - - - - - - - - - - 2 - - - - - - - - - false - - - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - - - - - SEL_1 - - - - - - - - - - - - - - _col0 - - - - - - double - - - - - - - _col1 - - - - - - double - - - - - - - - - - - - - - src - - - - - - - - - - 0 - - - - - - - key - - - - - TS_0 - - - - - - - - - key - - - src - - - - - - string - - - - - - - value - - - src - - - - - - string - - - - - - - true - - - BLOCK__OFFSET__INSIDE__FILE - - - src - - - - - bigint - - - - - bigint - - - - - - - true - - - INPUT__FILE__NAME - - - src - - - - - - string - - - - - - - - - - - - - #### A masked pattern was here #### - - - tmap:src - - - - - - - #### A masked pattern was here #### - - - src - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - - - - name - default.src - - - numFiles - 1 - - - columns.types - string:string - - - serialization.ddl - struct src { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - rawDataSize - 0 - - - columns.comments - defaultdefault - - - numRows - 0 - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - COLUMN_STATS_ACCURATE - true - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - totalSize - 5812 - - - file.outputformat - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - location - #### A masked pattern was here #### - - - transient_lastDdlTime - #### A masked pattern was here #### - - - - - - - - - - - - - - - - - -1 - - - - - - - - - - - - - - - - - #### A masked pattern was here #### - - - - - NONE - - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - hive.serialization.extend.nesting.levels - true - - - columns - _col0,_col1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - serialization.format - 1 - - - columns.types - string:string - - - escape.delim - \ - - - - - - - 1 - - - - - FS_7 - - - - - - - - - - - - - - _col0 - - - - - - - - - string - - - - - - - _col1 - - - - - - - - - string - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.ql.exec.TextRecordReader - - - org.apache.hadoop.hive.ql.exec.TextRecordWriter - - - org.apache.hadoop.hive.ql.exec.TextRecordReader - - - uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@" - - - - 
- org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - columns - KEY - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - serialization.format - 9 - - - - - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - columns - _col0,_col1 - - - serialization.lib - org.apache.hadoop.hive.serde2.DelimitedJSONSerDe - - - serialization.format - 9 - - - columns.types - string,string - - - - - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - columns - _col0,_col1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - serialization.format - 9 - - - columns.types - string,string - - - - - - - - - SCR_6 - - - - - - - - - - - - - - key - - - _col0 - - - - - - string - - - - - - - value - - - _col1 - - - - - - string - - - - - - - - - - - - - - _col1 - - - _col1 - - - tmap - - - - - - - - _col0 - - - _col0 - - - tmap - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - - - - - SEL_5 - - - - - - - - - - - - - - _col0 - - - - - - string - - - - - - - _col1 - - - - - - string - - - - - - - - - - - - - - - - VALUE - - - - - - - - - - - - - EX_4 - - - - - - - - - - - - - - _col0 - - - tmap - - - - - - string - - - - - - - _col1 - - - tmap - - - - - - string - - - - - - - - - - - - - - - - - - - - + +#### A masked pattern was here #### + + + Stage-1 + + + + + true + + + + + tmap:src + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + + + + name + default.src + + + columns.types + string:string + + + serialization.ddl + struct src { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + columns.comments + defaultdefault + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + file.outputformat + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + location + #### A masked pattern was here #### + + + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + name + default.src + + + numFiles + 1 + + + columns.types + string:string + + + serialization.ddl + struct src { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + rawDataSize + 0 + + + columns.comments + defaultdefault + + + numRows + 0 + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + COLUMN_STATS_ACCURATE + true + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + totalSize + 5812 + + + file.outputformat + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + location + #### A masked pattern was here #### + + + transient_lastDdlTime + #### A masked pattern was here #### + + + + + + + + + + + tmap:src + + + + + + + + + + + + + + _col1 + + + _col1 + + + + + string + + + + + + + _col0 + + + _col0 + + + + + + + + + + + + + + + + + + + _col0 + + + + + + + + + + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + reducesinkkey0 + + + serialization.lib + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + 
+ serialization.sort.order + + + + + columns.types + string + + + + + + + -1 + + + 1 + + + -1 + + + + + reducesinkkey0 + + + + + + + _col0 + + + _col1 + + + + + + + + + _col0 + + + + + + + + + + -1 + + + + + + + + + + + + + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + columns.types + string,string + + + escape.delim + \ + + + + + + + + + RS_3 + + + + + + + + + + + + + + key + + + _col0 + + + + + + string + + + + + + + value + + + _col1 + + + + + + string + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.exec.TextRecordReader + + + org.apache.hadoop.hive.ql.exec.TextRecordWriter + + + org.apache.hadoop.hive.ql.exec.TextRecordReader + + + cat + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + KEY + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + serialization.format + 9 + + + + + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.DelimitedJSONSerDe + + + serialization.format + 9 + + + columns.types + double,double + + + + + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + serialization.format + 9 + + + columns.types + string,string + + + serialization.last.column.takes.rest + true + + + + + + + + + SCR_2 + + + + + + + + + + + + + + + + + + + + _col1 + + + + + + + key + + + src + + + + + + + + + + + + int + + + + + 5 + + + + + + + + + false + + + + + + + double + + + + + + + _col0 + + + + + + + key + + + src + + + + + + + + + + + + + 2 + + + + + + + + + false + + + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + + + SEL_1 + + + + + + + + + + + + + + _col0 + + + + + + double + + + + + + + _col1 + + + + + + double + + + + + + + + + + + + + + src + + + + + + + + + + 0 + + + + + + + key + + + + + TS_0 + + + + + key + + + + + + + + + + + key + + + src + + + + + + string + + + + + + + value + + + src + + + + + + string + + + + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + src + + + + + bigint + + + + + bigint + + + + + + + true + + + INPUT__FILE__NAME + + + src + + + + + + string + + + + + + + + + + + + + #### A masked pattern was here #### + + + tmap:src + + + + + + + #### A masked pattern was here #### + + + src + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + + + + name + default.src + + + numFiles + 1 + + + columns.types + string:string + + + serialization.ddl + struct src { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + rawDataSize + 0 + + + columns.comments + defaultdefault + + + numRows + 0 + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + COLUMN_STATS_ACCURATE + true + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + totalSize + 5812 + + + file.outputformat + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + location + #### A masked pattern was here 
#### + + + transient_lastDdlTime + #### A masked pattern was here #### + + + + + + + + + + + + + + + + + -1 + + + + + + + + + + + + + + + + + #### A masked pattern was here #### + + + + + NONE + + + + 1 + + + #### A masked pattern was here #### + + + true + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + hive.serialization.extend.nesting.levels + true + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + serialization.format + 1 + + + columns.types + string:string + + + escape.delim + \ + + + + + + + 1 + + + + + FS_7 + + + + + + + + + + + + + + _col0 + + + + + + + + + string + + + + + + + _col1 + + + + + + + + + string + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.exec.TextRecordReader + + + org.apache.hadoop.hive.ql.exec.TextRecordWriter + + + org.apache.hadoop.hive.ql.exec.TextRecordReader + + + uniq -c | sed "s@^ *@@" | sed "s@\t@_@" | sed "s@ @\t@" + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + KEY + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + serialization.format + 9 + + + + + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.DelimitedJSONSerDe + + + serialization.format + 9 + + + columns.types + string,string + + + + + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + serialization.format + 9 + + + columns.types + string,string + + + + + + + + + SCR_6 + + + + + + + + + + + + + + key + + + _col0 + + + + + + string + + + + + + + value + + + _col1 + + + + + + string + + + + + + + + + + + + + + _col1 + + + _col1 + + + tmap + + + + + + + + _col0 + + + _col0 + + + tmap + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + + + SEL_5 + + + + + + + + + + + + + + _col0 + + + + + + string + + + + + + + _col1 + + + + + + string + + + + + + + + + + + + + + + + VALUE + + + + + + + + + + + + + EX_4 + + + + + + + + + + + + + + _col0 + + + tmap + + + + + + string + + + + + + + _col1 + + + tmap + + + + + + string + + + + + + + + + + + + + + + + + + + + diff --git ql/src/test/results/compiler/plan/input3.q.xml ql/src/test/results/compiler/plan/input3.q.xml index 5d3d5a6..ec78b6d 100755 --- ql/src/test/results/compiler/plan/input3.q.xml +++ ql/src/test/results/compiler/plan/input3.q.xml @@ -232,7 +232,7 @@ - + key @@ -244,6 +244,9 @@ TS_13 + + + @@ -560,7 +563,7 @@ Stage-16 - + @@ -777,7 +780,7 @@ - + key @@ -789,6 +792,9 @@ TS_15 + + + @@ -1079,7 +1085,7 @@ - + @@ -1105,7 +1111,7 @@ Stage-22 - + @@ -1330,7 +1336,7 @@ - + key @@ -1342,6 +1348,9 @@ TS_17 + + + @@ -1649,7 +1658,7 @@ - + @@ -1675,7 +1684,7 @@ Stage-27 - + @@ -1806,7 +1815,7 @@ - + _col0 @@ -1815,6 +1824,9 @@ TS_19 + + + @@ -1992,7 +2004,7 @@ #### A masked pattern was here #### - + @@ -3231,6 +3243,16 @@ TS_0 + + + + key + + + value + + + diff --git ql/src/test/results/compiler/plan/input4.q.xml ql/src/test/results/compiler/plan/input4.q.xml index 15dd6cc..6ac89f2 100755 --- ql/src/test/results/compiler/plan/input4.q.xml +++ 
ql/src/test/results/compiler/plan/input4.q.xml @@ -1,1352 +1,1362 @@ - -#### A masked pattern was here #### - - - - - - - - - - - Stage-2 - - - - - - - - - - - - #### A masked pattern was here #### - - - - - - - - - - - - - - - - - - - Stage-0 - - - - - - - - - - - - - - - - HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME - - - HIVE_DEFAULT_LIST_BUCKETING_KEY - - - - - - - - - - - - - - - - - true - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - name - default.dest1 - - - columns.types - string:string - - - serialization.ddl - struct dest1 { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - columns.comments - defaultdefault - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - file.outputformat - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - location - #### A masked pattern was here #### - - - transient_lastDdlTime - #### A masked pattern was here #### - - - - - - - - - - - - - - - Stage-1 - - - - - true - - - - - tmap:src - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - - - - name - default.src - - - columns.types - string:string - - - serialization.ddl - struct src { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - columns.comments - defaultdefault - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - file.outputformat - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - location - #### A masked pattern was here #### - - - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - name - default.src - - - numFiles - 1 - - - columns.types - string:string - - - serialization.ddl - struct src { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - rawDataSize - 0 - - - columns.comments - defaultdefault - - - numRows - 0 - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - COLUMN_STATS_ACCURATE - true - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - totalSize - 5812 - - - file.outputformat - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - location - #### A masked pattern was here #### - - - transient_lastDdlTime - #### A masked pattern was here #### - - - - - - - - - - - tmap:src - - - - - - - - - - - - - - - - - _col1 - - - _col1 - - - - - string - - - - - - - _col0 - - - _col0 - - - - - - - - - - - - - - - - - - - _col0 - - - - - - - - - - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - reducesinkkey0 - - - serialization.lib - org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe - - - serialization.sort.order - + - - - columns.types - string - - - - - - - -1 - - - 1 - - - -1 - - - - - reducesinkkey0 - - - - - - - _col0 - - - _col1 - - - - - - - - - _col0 - - - - - - - - - - -1 - - - - - - - - - - - - - - - org.apache.hadoop.mapred.SequenceFileInputFormat - - - org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - - - - - columns - _col0,_col1 - - - serialization.lib - 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - - columns.types - string,string - - - escape.delim - \ - - - - - - - - - RS_3 - - - - - - - - - - - - - - tkey - - - _col0 - - - - - - string - - - - - - - tvalue - - - _col1 - - - - - - string - - - - - - - - - - - - - - - - - - - - - - - - - int - - - - - 100 - - - - - - - - - - - - boolean - - - - - - - - - FIL_8 - - - - - - - - - - - - - - - - - - - - - - - org.apache.hadoop.hive.ql.exec.TextRecordReader - - - org.apache.hadoop.hive.ql.exec.TextRecordWriter - - - org.apache.hadoop.hive.ql.exec.TextRecordReader - - - /bin/cat - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - columns - KEY - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - serialization.format - 9 - - - - - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - columns - _col0,_col1 - - - serialization.lib - org.apache.hadoop.hive.serde2.DelimitedJSONSerDe - - - serialization.format - 9 - - - columns.types - string,string - - - - - - - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - field.delim - 9 - - - columns - _col0,_col1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - serialization.format - 9 - - - columns.types - string,string - - - - - - - - - SCR_2 - - - - - - - - - - - - - - - - - - - - _col1 - - - value - - - src - - - - - - - - _col0 - - - key - - - src - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - - - - - SEL_1 - - - - - - - - - - - - - - _col0 - - - - - - string - - - - - - - _col1 - - - - - - string - - - - - - - - - - - - - - src - - - - - - - - - - 0 - - - 1 - - - - - - - key - - - value - - - - - TS_0 - - - - - - - - - key - - - src - - - - - - string - - - - - - - value - - - src - - - - - - string - - - - - - - true - - - BLOCK__OFFSET__INSIDE__FILE - - - src - - - - - bigint - - - - - bigint - - - - - - - true - - - INPUT__FILE__NAME - - - src - - - - - - string - - - - - - - - - - - - true - - - - #### A masked pattern was here #### - - - tmap:src - - - - - - - #### A masked pattern was here #### - - - src - - - org.apache.hadoop.mapred.TextInputFormat - - - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - - - - - - name - default.src - - - numFiles - 1 - - - columns.types - string:string - - - serialization.ddl - struct src { string key, string value} - - - serialization.format - 1 - - - columns - key,value - - - rawDataSize - 0 - - - columns.comments - defaultdefault - - - numRows - 0 - - - bucket_count - -1 - - - serialization.lib - org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - - COLUMN_STATS_ACCURATE - true - - - file.inputformat - org.apache.hadoop.mapred.TextInputFormat - - - totalSize - 5812 - - - file.outputformat - org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat - - - location - #### A masked pattern was here #### - - - transient_lastDdlTime - #### A masked pattern was here #### - - - - - - - - - - - - - - true - - - - - - -1 - - - - - - - - - - - - - 1 - - - - #### A masked pattern was here #### - - - - - NONE - - - - true - - - - - - 150 - - - 1 - - - #### A masked pattern was here #### - - - true - - - - - - 1 - - - - - FS_7 - - - - - - - - - - - - - - key - - - - - - - - - string - - - - - - - value - - - - - - - - - string - - - - - - - - - - - 
- - - _col1 - - - _col1 - - - tmap - - - - - - - - _col0 - - - _col0 - - - tmap - - - - - - - - - - - - - - - - - - - - - - - - _col0 - - - _col1 - - - - - - - SEL_6 - - - - - - - - - - - - - - _col0 - - - tmap - - - - - - string - - - - - - - _col1 - - - tmap - - - - - - string - - - - - - - - - - - - - - - - VALUE - - - - - - - - - - - - - EX_4 - - - - - - - - - - - - - - _col0 - - - tmap - - - - - - string - - - - - - - _col1 - - - tmap - - - - - - string - - - - - - - - - - - - - - - - - - - - + +#### A masked pattern was here #### + + + + + + + + + + + Stage-2 + + + + + + + + + + + + #### A masked pattern was here #### + + + + + + + + + + + + + + + + + + + Stage-0 + + + + + + + + + + + + + + + + HIVE_DEFAULT_LIST_BUCKETING_DIR_NAME + + + HIVE_DEFAULT_LIST_BUCKETING_KEY + + + + + + + + + + + + + + + + + true + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + name + default.dest1 + + + columns.types + string:string + + + serialization.ddl + struct dest1 { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + columns.comments + defaultdefault + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + file.outputformat + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + location + #### A masked pattern was here #### + + + transient_lastDdlTime + #### A masked pattern was here #### + + + + + + + + + + + + + + + Stage-1 + + + + + true + + + + + tmap:src + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + + + + name + default.src + + + columns.types + string:string + + + serialization.ddl + struct src { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + columns.comments + defaultdefault + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + file.outputformat + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + location + #### A masked pattern was here #### + + + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + name + default.src + + + numFiles + 1 + + + columns.types + string:string + + + serialization.ddl + struct src { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + rawDataSize + 0 + + + columns.comments + defaultdefault + + + numRows + 0 + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + COLUMN_STATS_ACCURATE + true + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + totalSize + 5812 + + + file.outputformat + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + location + #### A masked pattern was here #### + + + transient_lastDdlTime + #### A masked pattern was here #### + + + + + + + + + + + tmap:src + + + + + + + + + + + + + + + + + _col1 + + + _col1 + + + + + string + + + + + + + _col0 + + + _col0 + + + + + + + + + + + + + + + + + + + _col0 + + + + + + + + + + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + reducesinkkey0 + + + serialization.lib + org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe + + + 
serialization.sort.order + + + + + columns.types + string + + + + + + + -1 + + + 1 + + + -1 + + + + + reducesinkkey0 + + + + + + + _col0 + + + _col1 + + + + + + + + + _col0 + + + + + + + + + + -1 + + + + + + + + + + + + + + + org.apache.hadoop.mapred.SequenceFileInputFormat + + + org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + + + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + + + columns.types + string,string + + + escape.delim + \ + + + + + + + + + RS_3 + + + + + + + + + + + + + + tkey + + + _col0 + + + + + + string + + + + + + + tvalue + + + _col1 + + + + + + string + + + + + + + + + + + + + + + + + + + + + + + + + int + + + + + 100 + + + + + + + + + + + + boolean + + + + + + + + + FIL_8 + + + + + + + + + + + + + + + + + + + + + + + org.apache.hadoop.hive.ql.exec.TextRecordReader + + + org.apache.hadoop.hive.ql.exec.TextRecordWriter + + + org.apache.hadoop.hive.ql.exec.TextRecordReader + + + /bin/cat + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + KEY + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + serialization.format + 9 + + + + + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.DelimitedJSONSerDe + + + serialization.format + 9 + + + columns.types + string,string + + + + + + + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + field.delim + 9 + + + columns + _col0,_col1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + serialization.format + 9 + + + columns.types + string,string + + + + + + + + + SCR_2 + + + + + + + + + + + + + + + + + + + + _col1 + + + value + + + src + + + + + + + + _col0 + + + key + + + src + + + + + + + + + + + + + + + + + + + + + + + + _col0 + + + _col1 + + + + + + + SEL_1 + + + + + + + + + + + + + + _col0 + + + + + + string + + + + + + + _col1 + + + + + + string + + + + + + + + + + + + + + src + + + + + + + + + + 0 + + + 1 + + + + + + + key + + + value + + + + + TS_0 + + + + + key + + + value + + + + + + + + + + + key + + + src + + + + + + string + + + + + + + value + + + src + + + + + + string + + + + + + + true + + + BLOCK__OFFSET__INSIDE__FILE + + + src + + + + + bigint + + + + + bigint + + + + + + + true + + + INPUT__FILE__NAME + + + src + + + + + + string + + + + + + + + + + + + true + + + + #### A masked pattern was here #### + + + tmap:src + + + + + + + #### A masked pattern was here #### + + + src + + + org.apache.hadoop.mapred.TextInputFormat + + + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + + + + + + name + default.src + + + numFiles + 1 + + + columns.types + string:string + + + serialization.ddl + struct src { string key, string value} + + + serialization.format + 1 + + + columns + key,value + + + rawDataSize + 0 + + + columns.comments + defaultdefault + + + numRows + 0 + + + bucket_count + -1 + + + serialization.lib + org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + + COLUMN_STATS_ACCURATE + true + + + file.inputformat + org.apache.hadoop.mapred.TextInputFormat + + + totalSize + 5812 + + + file.outputformat + org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + + + location + #### A masked pattern was here #### + + + 
[The XML markup of the remaining hunks was stripped when this copy of the patch was made, so they cannot be reproduced verbatim. The tail of the preceding regenerated plan (its FS_7 / SEL_6 / EX_4 operator sections) is likewise unrecoverable. The information that can still be read from the residue is summarized below.]

These hunks regenerate the golden compiler plans under ql/src/test/results/compiler/plan/ so that each serialized TableScan operator (TS_*) now carries a list of referenced column names. A few of the files also contain one-line changes (apparently id renumbering caused by the inserted elements) whose exact content is not recoverable here. Affected files and the columns each TableScan gains:

input5.q.xml                  (index 2f06f1e..5bcc577)  plan re-emitted in full (1418 -> 1428 lines); TS_0: lint, lintstring
input6.q.xml                  (index a94ee15..39c6c27)  TS_5: (empty list); TS_0: key, value
input7.q.xml                  (index 80aac8d..f0153ae)  TS_3: (empty list); TS_0: key
input8.q.xml                  (index e683312..04734c1)  TS_0: key
input9.q.xml                  (index 7935d64..c609573)  TS_5: (empty list); TS_0: key
input_part1.q.xml             (index f89afe6..e70d251)  TS_0: key, value, ds, hr
input_testsequencefile.q.xml  (index ca08c02..9b14de9)  TS_3: (empty list); TS_0: key, value
input_testxpath.q.xml         (index 2ba0b5e..0f26e4b)  TS_0: lint, lintstring, mstringstring
input_testxpath2.q.xml        (index 613752b..c5cdf62)  TS_0: lint, lintstring, mstringstring
join1.q.xml                   (index f5a6123..fc9d49b)  TS_0: key, value; TS_1: key
join2.q.xml                   (index 92292b7..d97bc53)  TS_12: (empty list); TS_1: key, value; TS_0: key; TS_2: key
join3.q.xml                   (index c44132b..2c0fccc)  TS_0: key; TS_1: key, value; TS_2: key
join4.q.xml                   (index c7257e8..ebdc632)  TS_3: key, value; TS_0: key, value
join5.q.xml                   (index bc62668..d4c920a)  TS_3: key, value; TS_0: key, value
join6.q.xml                   (index 1e1866d..453d11e)  TS_3: key, value; TS_0: key, value
join7.q.xml                   (index 5f65ddb..9e5b95c)  TS_6: key, value; TS_0: key, value; TS_3: key, value
join8.q.xml                   (index bb9ec1f..4e286d5)  TS_3: key, value; TS_0: key, value
sample1.q.xml                 (index be97dbe..3d7b1a7)  TS_0: key, value, ds, hr
sample2.q.xml                 (index d12bda4..a7511cd)  TS_4: (empty list); TS_0: key, value
sample3.q.xml                 (index 01ae0c5..371131a)  TS_4: (empty list); TS_0: key, value
sample4.q.xml                 (index d12bda4..a7511cd)  TS_4: (empty list); TS_0: key, value
sample5.q.xml                 (index e27ac08..69eb500)  TS_4: (empty list); TS_0: key, value
sample6.q.xml                 (index c496604..b021a47)  TS_4: (empty list); TS_0: key, value
sample7.q.xml                 (index 0b39977..ecadf87)  TS_6: (empty list); TS_0: key, value
subq.q.xml                    (index ad63615..6da8f37)  TS_6: (empty list); TS_0: key, value
udf1.q.xml                    (index 3e944ad..5d34030)  TS_0: key
udf4.q.xml                    (index 61434ea..634800b)  TS_0: (empty list)
udf6.q.xml                    (index c04aaba..876c14a)  TS_0: (empty list)
udf_case.q.xml                (index 40fe450..31854f7)  TS_0: (empty list)
udf_when.q.xml                (index 6b73dbd..7049da3)  TS_0: (empty list)
union.q.xml                   (index 34224e0..87a2f77)  TS_11: (empty list); TS_3: key, value; TS_0: key, value
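Since the added XML fragments could not be reproduced above, the following is a minimal sketch of what a two-column entry presumably looks like in these java.beans.XMLEncoder-style plan files. The property name, nesting, and the key/value column names are assumptions inferred from the hunk sizes and the encoder's usual list layout, not text copied from the original patch.

  <!-- hypothetical reconstruction: a referenced-columns entry with two columns,
       following the XMLEncoder list layout used elsewhere in these plan files -->
  <void property="referencedColumns">
   <object class="java.util.ArrayList">
    <void method="add">
     <string>key</string>
    </void>
    <void method="add">
     <string>value</string>
    </void>
   </object>
  </void>

Under the same assumption, the three-line additions next to operators such as TS_5 or TS_12 would be this property wrapping an empty ArrayList, and each additional column would add three more lines, which is consistent with the hunk line counts summarized above.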