diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 36503fa..fa3e048 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -644,6 +644,17 @@ // statistics annotation fetches column statistics for all required columns and for all // required partitions which can be very expensive sometimes HIVE_STATS_FETCH_COLUMN_STATS("hive.stats.fetch.column.stats", false), + // in the absence of table/partition stats, average row size will be used to + // estimate the number of rows/data size + HIVE_STATS_AVG_ROW_SIZE("hive.stats.avg.row.size", 10000), + // in the absence of column statistics, the estimated number of rows/data size that will be + // emitted from the join operator will depend on this factor + HIVE_STATS_JOIN_FACTOR("hive.stats.join.factor", (float) 1.1), + // in the absence of uncompressed/raw data size, total file size will be used for statistics + // annotation. But the file may be compressed, encoded and serialized, which may be smaller in size + // than the actual uncompressed/raw data size. This factor will be multiplied with the file size to estimate + // the raw data size. + HIVE_STATS_DESERIALIZATION_FACTOR("hive.stats.deserialization.factor", (float) 1.0), // Concurrency HIVE_SUPPORT_CONCURRENCY("hive.support.concurrency", false), diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/AnnotateWithStatistics.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/AnnotateWithStatistics.java index aac447a..ccd102a 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/AnnotateWithStatistics.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/AnnotateWithStatistics.java @@ -23,7 +23,6 @@ import java.util.Map; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; -import org.apache.hadoop.hive.ql.exec.DemuxOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.LimitOperator; diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java index 9259637..d03a760 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/stats/annotation/StatsRulesProcFactory.java @@ -23,12 +23,12 @@ import java.util.Map; import java.util.Stack; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.CommonJoinOperator; -import org.apache.hadoop.hive.ql.exec.DemuxOperator; import org.apache.hadoop.hive.ql.exec.FilterOperator; import org.apache.hadoop.hive.ql.exec.GroupByOperator; import org.apache.hadoop.hive.ql.exec.LimitOperator; @@ -53,7 +53,6 @@ import org.apache.hadoop.hive.ql.plan.JoinDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; import org.apache.hadoop.hive.ql.plan.Statistics; -import org.apache.hadoop.hive.ql.plan.Statistics.State; import org.apache.hadoop.hive.ql.stats.StatsUtils; import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; import
org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd; @@ -75,13 +74,16 @@ public class StatsRulesProcFactory { + private static final Log LOG = LogFactory.getLog(StatsRulesProcFactory.class.getName()); + /** - * Collect basic statistics like number of rows, data size and column level - * statistics from the table. Also sets the state of the available statistics. - * Basic and column statistics can have one of the following states - * COMPLETE, PARTIAL, NONE. In case of partitioned table, the basic and column - * stats are aggregated together to table level statistics. - * + * Collect basic statistics like number of rows, data size and column level statistics from the + * table. Also sets the state of the available statistics. Basic and column statistics can have + * one of the following states COMPLETE, PARTIAL, NONE. In case of partitioned table, the basic + * and column stats are aggregated together to table level statistics. Column statistics will not + * be collected if hive.stats.fetch.column.stats is set to false. If basic statistics is not + * available then number of rows will be estimated from file size and average row size (computed + * from schema). */ public static class TableScanStatsRule extends DefaultStatsRule implements NodeProcessor { @@ -102,6 +104,10 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Statistics stats = StatsUtils.collectStatistics(aspCtx.getConf(), partList, table, tsop); try { tsop.setStatistics(stats.clone()); + + if (LOG.isDebugEnabled()) { + LOG.debug("[0] STATS-" + tsop.toString() + ": " + stats.extendedToString()); + } } catch (CloneNotSupportedException e) { throw new SemanticException(ErrorMsg.STATISTICS_CLONING_FAILED.getMsg()); } @@ -110,23 +116,19 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } /** - * SELECT operator doesn't change the number of rows emitted from the parent - * operator. It changes the size of each tuple emitted. In a typical case, - * where only subset of columns are selected the average row size will - * reduce as some of the columns will be pruned. In order to accurately - * compute the average row size, column level statistics is required. - * Column level statistics stores average size of values in column which - * can be used to more reliably estimate the reduction in size of each - * tuple. In the absence of column level statistics, size of columns will be - * based on data type. For primitive data types size from - * {@link org.apache.hadoop.hive.ql.util.JavaDataModel} will be - * used and for variable length data types worst case will be assumed. - * + * SELECT operator doesn't change the number of rows emitted from the parent operator. It changes + * the size of each tuple emitted. In a typical case, where only subset of columns are selected + * the average row size will reduce as some of the columns will be pruned. In order to accurately + * compute the average row size, column level statistics is required. Column level statistics + * stores average size of values in column which can be used to more reliably estimate the + * reduction in size of each tuple. In the absence of column level statistics, size of columns + * will be based on data type. For primitive data types size from + * {@link org.apache.hadoop.hive.ql.util.JavaDataModel} will be used and for variable length data + * types worst case will be assumed. *

* For more information, refer 'Estimating The Cost Of Operations' chapter in * "Database Systems: The Complete Book" by Garcia-Molina et. al. *
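As a rough standalone illustration of the row-size arithmetic above (illustration only, not code from this patch; the class and method names are invented), the data size after projection can be recomputed as the row count times the sum of the average sizes of only the selected columns:

import java.util.Arrays;
import java.util.List;

public class SelectSizeSketch {
  /** Data size after projection = rows * sum of the average column sizes of the kept columns. */
  static long estimateDataSize(long numRows, List<Long> avgColSizes) {
    long avgRowSize = 0;
    for (long sz : avgColSizes) {
      avgRowSize += sz;
    }
    return numRows * avgRowSize;
  }

  public static void main(String[] args) {
    // 1000 input rows; projection keeps an int (4 bytes) and a string averaging 20 bytes
    System.out.println(estimateDataSize(1000L, Arrays.asList(4L, 20L))); // 24000
  }
}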

- * */ public static class SelectStatsRule extends DefaultStatsRule implements NodeProcessor { @@ -155,15 +157,24 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, try { if (satisfyPrecondition(parentStats)) { Statistics stats = parentStats.clone(); - List colStats = StatsUtils.getColStatisticsFromExprMap(conf, parentStats, - sop.getColumnExprMap(), sop.getSchema()); + List colStats = + StatsUtils.getColStatisticsFromExprMap(conf, parentStats, sop.getColumnExprMap(), + sop.getSchema()); long dataSize = StatsUtils.getDataSizeFromColumnStats(stats.getNumRows(), colStats); stats.setColumnStats(colStats); stats.setDataSize(dataSize); sop.setStatistics(stats); + + if (LOG.isDebugEnabled()) { + LOG.debug("[0] STATS-" + sop.toString() + ": " + stats.extendedToString()); + } } else { if (parentStats != null) { sop.setStatistics(parentStats.clone()); + + if (LOG.isDebugEnabled()) { + LOG.debug("[1] STATS-" + sop.toString() + ": " + parentStats.extendedToString()); + } } } } catch (CloneNotSupportedException e) { @@ -175,16 +186,13 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } /** - * FILTER operator does not change the average row size but it does change - * the number of rows emitted. The reduction in the number of rows emitted - * is dependent on the filter expression. - * + * FILTER operator does not change the average row size but it does change the number of rows + * emitted. The reduction in the number of rows emitted is dependent on the filter expression. *
    * Notations: *
  • T(S) - Number of tuples in relation S
  • *
  • V(S,A) - Number of distinct values of attribute A in relation S
  • *
- * *
    * Rules: *
  • Column equals a constant
  • T(S) = T(R) / V(R,A) @@ -207,20 +215,18 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, *
  • Multiple OR conditions
  • - A simple formula is to evaluate the conditions independently * and sum the results: T(S) = m1 + m2 *

    - * * - Alternate formula T(S) = T(R) * ( 1 - ( 1 - m1/T(R) ) * ( 1 - m2/T(R) )) *

    * where, m1 is the number of tuples that satisfy condition1 and m2 is the number of tuples that * satisfy condition2 *

*

- * Worst case: If no column statistics are available, then T(R) = T(R)/2 will be - * used as heuristics. + * Worst case: If no column statistics are available, then the evaluation of each predicate + * expression will assume the worst case (i.e., half the input rows). *

* For more information, refer 'Estimating The Cost Of Operations' chapter in * "Database Systems: The Complete Book" by Garcia-Molina et. al. *
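The selectivity rules listed above can be summarized in a small standalone sketch (illustration only, not the patch's implementation; all names are invented). It assumes the input row count and per-column distinct-value counts are already known:

public class FilterSelectivitySketch {
  /** column = constant: T(S) = T(R) / V(R,A); fall back to half the rows when V(R,A) is unknown. */
  static long equalsConstant(long numRows, long countDistinct) {
    return countDistinct <= 0 ? numRows / 2 : numRows / countDistinct;
  }

  /** Range comparisons (<, <=, >, >=): T(S) = T(R) / 3. */
  static long rangeComparison(long numRows) {
    return numRows / 3;
  }

  /** OR: evaluate both sides independently and sum, never exceeding the input row count. */
  static long orCondition(long numRows, long m1, long m2) {
    return Math.min(m1 + m2, numRows);
  }

  /** AND: apply the second condition's selectivity to the rows surviving the first. */
  static long andCondition(long numRows, long m1, long m2) {
    if (numRows == 0) {
      return 0;
    }
    return Math.round(numRows * ((double) m1 / numRows) * ((double) m2 / numRows));
  }

  public static void main(String[] args) {
    long rows = 8;
    System.out.println(equalsConstant(rows, 4));  // 2
    System.out.println(rangeComparison(rows));    // 2
    System.out.println(orCondition(rows, 4, 4));  // 8
    System.out.println(andCondition(rows, 4, 2)); // 1
  }
}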

- * */ public static class FilterStatsRule extends DefaultStatsRule implements NodeProcessor { @@ -231,26 +237,47 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, FilterOperator fop = (FilterOperator) nd; Operator parent = fop.getParentOperators().get(0); Statistics parentStats = parent.getStatistics(); + List neededCols = null; + if (parent instanceof TableScanOperator) { + TableScanOperator tsop = (TableScanOperator) parent; + neededCols = tsop.getNeededColumns(); + } try { - if (satisfyPrecondition(parentStats)) { + if (parentStats != null) { ExprNodeDesc pred = fop.getConf().getPredicate(); // evaluate filter expression and update statistics - long newNumRows = evaluateExpression(parentStats, pred, aspCtx); + long newNumRows = evaluateExpression(parentStats, pred, aspCtx, neededCols); Statistics st = parentStats.clone(); - updateStats(st, newNumRows); - fop.setStatistics(st); - } else { - if (parentStats != null) { - // worst case, in the absence of column statistics assume half the rows are emitted - Statistics wcStats = getWorstCaseStats(parentStats.clone()); - fop.setStatistics(wcStats); + if (satisfyPrecondition(parentStats)) { + + // update statistics based on column statistics. + // OR conditions keeps adding the stats independently, this may + // result in number of rows getting more than the input rows in + // which case stats need not be updated + if (newNumRows <= parentStats.getNumRows()) { + updateStats(st, newNumRows, true); + } + + if (LOG.isDebugEnabled()) { + LOG.debug("[0] STATS-" + fop.toString() + ": " + st.extendedToString()); + } + } else { + + // update only the basic statistics in the absence of column statistics + if (newNumRows <= parentStats.getNumRows()) { + updateStats(st, newNumRows, false); + } + + if (LOG.isDebugEnabled()) { + LOG.debug("[1] STATS-" + fop.toString() + ": " + st.extendedToString()); + } } + fop.setStatistics(st); + aspCtx.setAndExprStats(null); } - - aspCtx.setAndExprStats(null); } catch (CloneNotSupportedException e) { throw new SemanticException(ErrorMsg.STATISTICS_CLONING_FAILED.getMsg()); } @@ -258,7 +285,7 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } private long evaluateExpression(Statistics stats, ExprNodeDesc pred, - AnnotateStatsProcCtx aspCtx) throws CloneNotSupportedException { + AnnotateStatsProcCtx aspCtx, List neededCols) throws CloneNotSupportedException { long newNumRows = 0; Statistics andStats = null; if (pred instanceof ExprNodeGenericFuncDesc) { @@ -272,28 +299,24 @@ private long evaluateExpression(Statistics stats, ExprNodeDesc pred, // evaluate children for (ExprNodeDesc child : genFunc.getChildren()) { - newNumRows = evaluateChildExpr(aspCtx.getAndExprStats(), child, aspCtx); - updateStats(aspCtx.getAndExprStats(), newNumRows); + newNumRows = evaluateChildExpr(aspCtx.getAndExprStats(), child, aspCtx, neededCols); + if (satisfyPrecondition(aspCtx.getAndExprStats())) { + updateStats(aspCtx.getAndExprStats(), newNumRows, true); + } else { + updateStats(aspCtx.getAndExprStats(), newNumRows, false); + } } - } else { - + } else if (udf instanceof GenericUDFOPOr) { // for OR condition independently compute and update stats - if (udf instanceof GenericUDFOPOr) { - for (ExprNodeDesc child : genFunc.getChildren()) { - newNumRows += evaluateChildExpr(stats, child, aspCtx); - } - } else if (udf instanceof GenericUDFOPNot) { - newNumRows = evaluateNotExpr(stats, pred, aspCtx); - } else if (udf instanceof GenericUDFOPNotNull) { - newNumRows = evaluateColEqualsNullExpr(stats, 
pred, aspCtx); - newNumRows = stats.getNumRows() - newNumRows; - } else if (udf instanceof GenericUDFOPNull) { - newNumRows = evaluateColEqualsNullExpr(stats, pred, aspCtx); - } else { - - // single predicate condition - newNumRows = evaluateChildExpr(stats, pred, aspCtx); + for (ExprNodeDesc child : genFunc.getChildren()) { + newNumRows += evaluateChildExpr(stats, child, aspCtx, neededCols); } + } else if (udf instanceof GenericUDFOPNot) { + newNumRows = evaluateNotExpr(stats, pred, aspCtx, neededCols); + } else { + + // single predicate condition + newNumRows = evaluateChildExpr(stats, pred, aspCtx, neededCols); } } else if (pred instanceof ExprNodeColumnDesc) { @@ -304,19 +327,20 @@ private long evaluateExpression(Statistics stats, ExprNodeDesc pred, String colType = encd.getTypeString(); if (colType.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)) { ColStatistics cs = stats.getColumnStatisticsForColumn(tabAlias, colName); - return cs.getNumTrues(); - } else { - - // if not boolean column return half the number of rows - return stats.getNumRows() / 2; + if (cs != null) { + return cs.getNumTrues(); + } } + + // if not boolean column return half the number of rows + return stats.getNumRows() / 2; } return newNumRows; } - private long evaluateNotExpr(Statistics stats, ExprNodeDesc pred, AnnotateStatsProcCtx aspCtx) - throws CloneNotSupportedException { + private long evaluateNotExpr(Statistics stats, ExprNodeDesc pred, AnnotateStatsProcCtx aspCtx, + List neededCols) throws CloneNotSupportedException { long numRows = stats.getNumRows(); @@ -329,7 +353,7 @@ private long evaluateNotExpr(Statistics stats, ExprNodeDesc pred, AnnotateStatsP // GenericUDF long newNumRows = 0; for (ExprNodeDesc child : ((ExprNodeGenericFuncDesc) pred).getChildren()) { - newNumRows = evaluateChildExpr(stats, child, aspCtx); + newNumRows = evaluateChildExpr(stats, child, aspCtx, neededCols); } return numRows - newNumRows; } else if (leaf instanceof ExprNodeConstantDesc) { @@ -348,18 +372,18 @@ private long evaluateNotExpr(Statistics stats, ExprNodeDesc pred, AnnotateStatsP String colType = encd.getTypeString(); if (colType.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)) { ColStatistics cs = stats.getColumnStatisticsForColumn(tabAlias, colName); - return cs.getNumFalses(); - } else { - - // if not boolean column return half the number of rows - return numRows / 2; + if (cs != null) { + return cs.getNumFalses(); + } } + // if not boolean column return half the number of rows + return numRows / 2; } } } // worst case - return numRows; + return numRows / 2; } private long evaluateColEqualsNullExpr(Statistics stats, ExprNodeDesc pred, @@ -380,26 +404,19 @@ private long evaluateColEqualsNullExpr(Statistics stats, ExprNodeDesc pred, ColStatistics cs = stats.getColumnStatisticsForColumn(tabAlias, colName); if (cs != null) { long dvs = cs.getCountDistint(); - // if NULLs exists, add 1 to distinct count - if (cs.getNumNulls() > 0) { - dvs += 1; - } - if (dvs != 0) { - return numRows / dvs; - } else { - return numRows; - } + numRows = dvs == 0 ? 
numRows / 2 : numRows / dvs; + return numRows; } } } } // worst case - return numRows; + return numRows / 2; } - private long evaluateChildExpr(Statistics stats, ExprNodeDesc child, AnnotateStatsProcCtx aspCtx) - throws CloneNotSupportedException { + private long evaluateChildExpr(Statistics stats, ExprNodeDesc child, + AnnotateStatsProcCtx aspCtx, List neededCols) throws CloneNotSupportedException { long numRows = stats.getNumRows(); @@ -421,19 +438,19 @@ private long evaluateChildExpr(Statistics stats, ExprNodeDesc child, AnnotateSta isConst = true; continue; } + + // if column name is not contained in needed column list then it + // is a partition column. We do not need to evaluate partition columns + // in filter expression since it will be taken care by partitio pruner + if (neededCols != null && !neededCols.contains(colName)) { + return numRows; + } + ColStatistics cs = stats.getColumnStatisticsForColumn(tabAlias, colName); if (cs != null) { long dvs = cs.getCountDistint(); - // if NULLs exists, add 1 to distinct count - if (cs.getNumNulls() > 0) { - dvs += 1; - } - - if (dvs != 0) { - return numRows / dvs; - } else { - return numRows; - } + numRows = dvs == 0 ? numRows / 2 : numRows / dvs; + return numRows; } } else if (leaf instanceof ExprNodeColumnDesc) { ExprNodeColumnDesc colDesc = (ExprNodeColumnDesc) leaf; @@ -442,53 +459,56 @@ private long evaluateChildExpr(Statistics stats, ExprNodeDesc child, AnnotateSta // if const is first argument then evaluate the result if (isConst) { + + // if column name is not contained in needed column list then it + // is a partition column. We do not need to evaluate partition columns + // in filter expression since it will be taken care by partitio pruner + if (neededCols != null && neededCols.indexOf(colName) == -1) { + return numRows; + } + ColStatistics cs = stats.getColumnStatisticsForColumn(tabAlias, colName); if (cs != null) { long dvs = cs.getCountDistint(); - // if NULLs exists, add 1 to distinct count - if (cs.getNumNulls() > 0) { - dvs += 1; - } - - if (dvs != 0) { - return numRows / dvs; - } else { - return numRows; - } + numRows = dvs == 0 ? numRows / 2 : numRows / dvs; + return numRows; } } } } } else if (udf instanceof GenericUDFOPNotEqual) { return numRows; - } else if (udf instanceof GenericUDFOPEqualOrGreaterThan || - udf instanceof GenericUDFOPEqualOrLessThan || - udf instanceof GenericUDFOPGreaterThan || - udf instanceof GenericUDFOPLessThan) { + } else if (udf instanceof GenericUDFOPEqualOrGreaterThan + || udf instanceof GenericUDFOPEqualOrLessThan || udf instanceof GenericUDFOPGreaterThan + || udf instanceof GenericUDFOPLessThan) { return numRows / 3; - } else { - return evaluateExpression(stats, genFunc, aspCtx); + } else if (udf instanceof GenericUDFOPNotNull) { + long newNumRows = evaluateColEqualsNullExpr(stats, genFunc, aspCtx); + return stats.getNumRows() - newNumRows; + } else if (udf instanceof GenericUDFOPNull) { + return evaluateColEqualsNullExpr(stats, genFunc, aspCtx); + } else if (udf instanceof GenericUDFOPAnd || udf instanceof GenericUDFOPOr + || udf instanceof GenericUDFOPNot) { + return evaluateExpression(stats, genFunc, aspCtx, neededCols); } } // worst case - return numRows; + return numRows / 2; } } /** - * GROUPBY operator changes the number of rows. The number of rows emitted - * by GBY operator will be atleast 1 or utmost T(R) (number of rows in relation T) - * based on the aggregation. A better estimate can be found if we have column statistics - * on the columns that we are grouping on. 
+ * GROUPBY operator changes the number of rows. The number of rows emitted by GBY operator will be + * at least 1 and at most T(R) (number of rows in relation R) based on the aggregation. A better + * estimate can be found if we have column statistics on the columns that we are grouping on. *

* Suppose we are grouping by attributes A,B,C and statistics for columns A,B,C are * available; then a better estimate can be found by taking the smaller of the product V(R,[A,B,C]) * (product of distinct cardinalities of A,B,C) and T(R)/2. *

* T(R) = min(T(R)/2, V(R,[A,B,C])) ---> [1] - * *

* In the presence of grouping sets, map-side GBY will emit more rows depending on the size of * grouping set (input rows * size of grouping set). These rows will get reduced because of @@ -503,27 +523,23 @@ private long evaluateChildExpr(Statistics stats, ExprNodeDesc child, AnnotateSta * T(R) = min(T(R)/2, T(R, GBY(A,B)) + T(R, GBY(A)) + T(R, GBY(B)) + 1)) *

* where GBY(A,B), GBY(A), GBY(B) are the GBY rules mentioned above [1] - *

* If hash-aggregation is disabled, apply the GBY rule [1] and then multiply the result by * the number of elements in the grouping set: T(R) = T(R) * length_of_grouping_set. Since we do not know * if hash-aggregation is enabled or disabled at compile time, we will assume the worst case, i.e., * hash-aggregation is disabled - *

* NOTE: The number of rows from the map-side GBY operator depends on the map-side parallelism, i.e., * the number of mappers. The map-side parallelism is read from the hive config * "hive.stats.map.parallelism". If the config is not set then a default parallelism of 1 will be * assumed. - *

- * Worst case: If no column statistics are available, then T(R) = T(R)/2 will be - * used as heuristics. + * Worst case: If no column statistics are available, then T(R) = T(R)/2 will be used as + * a heuristic. *

* For more information, refer 'Estimating The Cost Of Operations' chapter in * "Database Systems: The Complete Book" by Garcia-Molina et. al. *
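A minimal standalone sketch of rule [1] and the grouping-set adjustment described above (illustration only, not code from this patch; names are invented and the distinct-value counts are assumed known):

public class GroupByEstimateSketch {
  /** Rule [1]: T(R) = min(T(R)/2, V(R,[A,B,C])); fall back to T(R)/2 when any grouping column lacks stats. */
  static long applyGbyRule(long numRows, long[] groupingColumnNdvs) {
    long dvProd = 1;
    for (long ndv : groupingColumnNdvs) {
      if (ndv <= 0) {
        return numRows / 2; // missing column statistics: worst case
      }
      dvProd *= ndv;
    }
    return Math.min(numRows / 2, dvProd);
  }

  /** Map-side estimate with grouping sets, assuming hash-aggregation is disabled: rows scale with the grouping-set size. */
  static long mapSideWithGroupingSets(long numRows, int groupingSetSize) {
    return numRows * groupingSetSize;
  }

  public static void main(String[] args) {
    System.out.println(applyGbyRule(1000, new long[] {10, 5})); // min(500, 50) = 50
    System.out.println(applyGbyRule(1000, new long[] {10, 0})); // 500 (worst case)
    System.out.println(mapSideWithGroupingSets(1000, 3));       // 3000
  }
}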

- * */ public static class GroupByStatsRule extends DefaultStatsRule implements NodeProcessor { @@ -535,8 +551,8 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, Statistics parentStats = parent.getStatistics(); AnnotateStatsProcCtx aspCtx = (AnnotateStatsProcCtx) procCtx; HiveConf conf = aspCtx.getConf(); - int mapSideParallelism = HiveConf.getIntVar(conf, - HiveConf.ConfVars.HIVE_STATS_MAP_SIDE_PARALLELISM); + int mapSideParallelism = + HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_STATS_MAP_SIDE_PARALLELISM); List aggDesc = gop.getConf().getAggregators(); Map colExprMap = gop.getColumnExprMap(); RowSchema rs = gop.getSchema(); @@ -546,8 +562,8 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, if (satisfyPrecondition(parentStats)) { stats = parentStats.clone(); - List colStats = StatsUtils.getColStatisticsFromExprMap(conf, parentStats, - colExprMap, rs); + List colStats = + StatsUtils.getColStatisticsFromExprMap(conf, parentStats, colExprMap, rs); stats.setColumnStats(colStats); long dvProd = 1; long newNumRows = 0; @@ -563,7 +579,8 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } else { // partial column statistics on grouping attributes case. - // if column statistics on grouping attribute is missing, then assume worst case. + // if column statistics on grouping attribute is missing, then + // assume worst case. // GBY rule will emit half the number of rows if dvProd is 0 dvProd = 0; break; @@ -574,8 +591,8 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, if (gop.getChildOperators().get(0) instanceof ReduceSinkOperator) { // since we do not know if hash-aggregation will be enabled or disabled - // at runtime we will assume that map-side group by does not do any reduction. - // hence no group by rule will be applied + // at runtime we will assume that map-side group by does not do any + // reduction.hence no group by rule will be applied // map-side grouping set present. 
if grouping set is present then // multiply the number of rows by number of elements in grouping set @@ -599,13 +616,13 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, // map side no grouping set newNumRows = stats.getNumRows() * mapSideParallelism; - updateStats(stats, newNumRows); + updateStats(stats, newNumRows, true); } } else { // reduce side newNumRows = applyGBYRule(stats.getNumRows(), dvProd); - updateStats(stats, newNumRows); + updateStats(stats, newNumRows, true); } } else { if (parentStats != null) { @@ -618,7 +635,9 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } else { // reduce side - stats = getWorstCaseStats(parentStats); + stats = parentStats.clone(); + long newNumRows = parentStats.getNumRows() / 2; + updateStats(stats, newNumRows, false); } } } @@ -647,14 +666,19 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, stats.addToColumnStats(aggColStats); // if UDAF present and if column expression map is empty then it must - // be full aggregation query like count(*) in which case number of rows will be 1 + // be full aggregation query like count(*) in which case number of + // rows will be 1 if (colExprMap.isEmpty()) { stats.setNumRows(1); - updateStats(stats, 1); + updateStats(stats, 1, true); } } gop.setStatistics(stats); + + if (LOG.isDebugEnabled() && stats != null) { + LOG.debug("[0] STATS-" + gop.toString() + ": " + stats.extendedToString()); + } } catch (CloneNotSupportedException e) { throw new SemanticException(ErrorMsg.STATISTICS_CLONING_FAILED.getMsg()); } @@ -683,7 +707,6 @@ private long applyGBYRule(long numRows, long dvProd) { * will have a tuple in R T(RXS) = T(S) (we need histograms for this)
  • Both R & S relation * have same value for join-key. Ex: bool column with all true values T(RXS) = T(R) * T(S) (we * need histograms for this. counDistinct = 1 and same value)
  • - * *

    * In the absence of histograms, we can use the following general case *

    @@ -695,10 +718,11 @@ private long applyGBYRule(long numRows, long dvProd) { *

* T(RXS) = T(R)*T(S) / (max(V(R,y1), V(S,y1)) * max(V(R,y2), V(S,y2))), where y1 and y2 are the join * attributes - *

- * Worst case: If no column statistics are available, then T(RXS) = T(R)*T(S)/2 will be - * used as heuristics. + * Worst case: If no column statistics are available, then T(RXS) = joinFactor * max(T(R), + * T(S)) * (numParents - 1) will be used as a heuristic. joinFactor is taken from the hive.stats.join.factor + * hive config. In the worst case, since we do not have any information about the join keys (and hence + * which of the 3 cases to use), we leave it to the user to provide the join factor. *
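A minimal standalone sketch of the general-case formula and the joinFactor worst case described above (illustration only, not code from this patch; names are invented and the distinct-value counts are assumed known):

import java.util.ArrayList;
import java.util.List;

public class JoinEstimateSketch {
  /** General case: T(RXS) = T(R)*T(S) / (product over join attributes of max(V(R,yi), V(S,yi))). */
  static long joinRowCount(long rowsR, long rowsS, List<long[]> ndvPairsPerAttr) {
    long denom = 1;
    for (long[] pair : ndvPairsPerAttr) {
      denom *= Math.max(pair[0], pair[1]);
    }
    return denom <= 0 ? (rowsR * rowsS) / 2 : (rowsR * rowsS) / denom;
  }

  /** Worst case without column statistics, driven by the user-supplied join factor. */
  static long worstCaseRowCount(float joinFactor, long maxParentRowCount, int numParents) {
    return (long) (joinFactor * maxParentRowCount * (numParents - 1));
  }

  public static void main(String[] args) {
    List<long[]> ndvs = new ArrayList<long[]>();
    ndvs.add(new long[] {100, 50}); // single join attribute: V(R,y)=100, V(S,y)=50
    System.out.println(joinRowCount(1000, 500, ndvs));    // 1000*500/100 = 5000
    System.out.println(worstCaseRowCount(1.1f, 1000, 2)); // 1100
  }
}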

    * For more information, refer 'Estimating The Cost Of Operations' chapter in * "Database Systems: The Complete Book" by Garcia-Molina et. al. @@ -730,16 +754,15 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, } } - try { if (allSatisfyPreCondition) { - // statistics object that is combination of statistics from all relations involved in JOIN + // statistics object that is combination of statistics from all + // relations involved in JOIN Statistics stats = new Statistics(); long prodRows = 1; List distinctVals = Lists.newArrayList(); boolean multiAttr = false; - Map joinedColStats = Maps.newHashMap(); Map> joinKeys = Maps.newHashMap(); @@ -756,33 +779,34 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, multiAttr = true; } - // compute fully qualified join key column names. this name will be used to - // quickly look-up for column statistics of join key. - // TODO: expressions in join condition will be ignored. assign internal name - // for expressions and estimate column statistics for expression. - List fqCols = StatsUtils.getFullQualifedColNameFromExprs(keyExprs, - parent.getColumnExprMap()); + // compute fully qualified join key column names. this name will be + // used to quickly look-up for column statistics of join key. + // TODO: expressions in join condition will be ignored. assign + // internal name for expressions and estimate column statistics for expression. + List fqCols = + StatsUtils.getFullQualifedColNameFromExprs(keyExprs, parent.getColumnExprMap()); joinKeys.put(pos, fqCols); Map colExprMap = parent.getColumnExprMap(); RowSchema rs = parent.getSchema(); // get column statistics for all output columns - List cs = StatsUtils.getColStatisticsFromExprMap(conf, parentStats, - colExprMap, rs); + List cs = + StatsUtils.getColStatisticsFromExprMap(conf, parentStats, colExprMap, rs); for (ColStatistics c : cs) { if (c != null) { joinedColStats.put(c.getFullyQualifiedColName(), c); } } - // since new statistics is derived from all relations involved in JOIN, - // we need to update the state information accordingly + // since new statistics is derived from all relations involved in + // JOIN, we need to update the state information accordingly stats.updateColumnStatsState(parentStats.getColumnStatsState()); } - // compute denominator i.e, max(V(R,Y), V(S,Y)) in case of single attribute join. 
- // else max(V(R,y1), V(S,y1)) * max(V(R,y2), V(S,y2)) in case of multi-attribute join + // compute denominator i.e, max(V(R,Y), V(S,Y)) in case of single + // attribute join, else max(V(R,y1), V(S,y1)) * max(V(R,y2), V(S,y2)) + // in case of multi-attribute join long denom = 1; if (multiAttr) { List perAttrDVs = Lists.newArrayList(); @@ -845,28 +869,45 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, stats.setNumRows(newRowCount); stats.setDataSize(StatsUtils.getDataSizeFromColumnStats(newRowCount, outColStats)); jop.setStatistics(stats); - } else { - // worst case, when no column statistics are available - if (parents.size() > 1) { - Statistics wcStats = new Statistics(); - Statistics stp1 = parents.get(0).getStatistics(); - long numRows = stp1.getNumRows(); - long avgRowSize = stp1.getAvgRowSize(); - for (int i = 1; i < parents.size(); i++) { - stp1 = parents.get(i).getStatistics(); - numRows = (numRows * stp1.getNumRows()) / 2; - avgRowSize += stp1.getAvgRowSize(); - } - wcStats.setNumRows(numRows); - wcStats.setDataSize(numRows * avgRowSize); - jop.setStatistics(wcStats); - } else { - jop.setStatistics(parents.get(0).getStatistics().clone()); + if (LOG.isDebugEnabled()) { + LOG.debug("[0] STATS-" + jop.toString() + ": " + stats.extendedToString()); + } + } else { + + // worst case when there are no column statistics + float joinFactor = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVE_STATS_JOIN_FACTOR); + int numParents = parents.size(); + List parentRows = Lists.newArrayList(); + List parentSizes = Lists.newArrayList(); + int maxRowIdx = 0; + long maxRowCount = 0; + int idx = 0; + + for (Operator op : parents) { + Statistics ps = op.getStatistics(); + long rowCount = ps.getNumRows(); + if (rowCount > maxRowCount) { + maxRowCount = rowCount; + maxRowIdx = idx; } + parentRows.add(rowCount); + parentSizes.add(ps.getDataSize()); + idx++; + } + + long maxDataSize = parentSizes.get(maxRowIdx); + long newNumRows = (long) (joinFactor * maxRowCount * (numParents - 1)); + long newDataSize = (long) (joinFactor * maxDataSize * (numParents - 1)); + + Statistics wcStats = new Statistics(); + wcStats.setNumRows(newNumRows); + wcStats.setDataSize(newDataSize); + jop.setStatistics(wcStats); + + if (LOG.isDebugEnabled()) { + LOG.debug("[1] STATS-" + jop.toString() + ": " + wcStats.extendedToString()); } - } catch (CloneNotSupportedException e) { - throw new SemanticException(ErrorMsg.STATISTICS_CLONING_FAILED.getMsg()); } } return null; @@ -874,22 +915,22 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, private long getDenominator(List distinctVals) { - if(distinctVals.isEmpty()) { + if (distinctVals.isEmpty()) { - // TODO: in union20.q the tab alias is not properly propagated down the operator - // tree. This happens when UNION ALL is used as sub query. Hence, even if column - // statistics are available, the tab alias will be null which will fail to get - // proper column statistics. For now assume, worst case in which denominator is 2. + // TODO: in union20.q the tab alias is not properly propagated down the + // operator tree. This happens when UNION ALL is used as sub query. Hence, even + // if column statistics are available, the tab alias will be null which will fail + // to get proper column statistics. For now assume, worst case in which + // denominator is 2. 
return 2; } - // simple join from 2 relations - // denom = max(v1, v2) + // simple join from 2 relations: denom = max(v1, v2) if (distinctVals.size() <= 2) { return Collections.max(distinctVals); } else { - // join from multiple relations + // join from multiple relations: // denom = max(v1, v2) * max(v2, v3) * max(v3, v4) long denom = 1; for (int i = 0; i < distinctVals.size() - 1; i++) { @@ -909,7 +950,6 @@ private long getDenominator(List distinctVals) { /** * LIMIT operator changes the number of rows and thereby the data size. - * */ public static class LimitStatsRule extends DefaultStatsRule implements NodeProcessor { @@ -919,6 +959,8 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, LimitOperator lop = (LimitOperator) nd; Operator parent = lop.getParentOperators().get(0); Statistics parentStats = parent.getStatistics(); + AnnotateStatsProcCtx aspCtx = (AnnotateStatsProcCtx) procCtx; + HiveConf conf = aspCtx.getConf(); try { long limit = -1; @@ -927,25 +969,37 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, if (satisfyPrecondition(parentStats)) { Statistics stats = parentStats.clone(); - // if limit is greater than available rows then do not update statistics + // if limit is greater than available rows then do not update + // statistics if (limit <= parentStats.getNumRows()) { - updateStats(stats, limit); + updateStats(stats, limit, true); } lop.setStatistics(stats); + + if (LOG.isDebugEnabled()) { + LOG.debug("[0] STATS-" + lop.toString() + ": " + stats.extendedToString()); + } } else { if (parentStats != null) { - // in the absence of column statistics, compute data size based on based - // on average row size + // in the absence of column statistics, compute data size based on + // based on average row size Statistics wcStats = parentStats.clone(); if (limit <= parentStats.getNumRows()) { long numRows = limit; long avgRowSize = parentStats.getAvgRowSize(); + if (avgRowSize <= 0) { + avgRowSize = HiveConf.getIntVar(conf, HiveConf.ConfVars.HIVE_STATS_AVG_ROW_SIZE); + } long dataSize = avgRowSize * limit; wcStats.setNumRows(numRows); wcStats.setDataSize(dataSize); } lop.setStatistics(wcStats); + + if (LOG.isDebugEnabled()) { + LOG.debug("[1] STATS-" + lop.toString() + ": " + wcStats.extendedToString()); + } } } } catch (CloneNotSupportedException e) { @@ -958,7 +1012,6 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, /** * Default rule is to aggregate the statistics from all its parent operators. - * */ public static class DefaultStatsRule implements NodeProcessor { @@ -973,8 +1026,8 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, if (stats == null) { if (op.getParentOperators() != null) { - // if parent statistics is null then that branch of the tree is not walked yet. - // don't update the stats until all branches are walked + // if parent statistics is null then that branch of the tree is not + // walked yet. 
don't update the stats until all branches are walked if (isAllParentsContainStatistics(op)) { stats = new Statistics(); for (Operator parent : op.getParentOperators()) { @@ -985,6 +1038,10 @@ public Object process(Node nd, Stack stack, NodeProcessorCtx procCtx, stats.updateColumnStatsState(parentStats.getColumnStatsState()); stats.addToColumnStats(parentStats.getColumnStats()); op.getConf().setStatistics(stats); + + if (LOG.isDebugEnabled()) { + LOG.debug("[0] STATS-" + op.toString() + ": " + stats.extendedToString()); + } } } } @@ -1036,37 +1093,43 @@ public static NodeProcessor getDefaultRule() { /** * Update the basic statistics of the statistics object based on the row number - * * @param stats * - statistics to be updated * @param newNumRows * - new number of rows + * @param useColStats + * - use column statistics to compute data size */ - static void updateStats(Statistics stats, long newNumRows) { + static void updateStats(Statistics stats, long newNumRows, boolean useColStats) { long oldRowCount = stats.getNumRows(); double ratio = (double) newNumRows / (double) oldRowCount; stats.setNumRows(newNumRows); - List colStats = stats.getColumnStats(); - for (ColStatistics cs : colStats) { - long oldNumNulls = cs.getNumNulls(); - long oldDV = cs.getCountDistint(); - long newNumNulls = Math.round(ratio * oldNumNulls); - long newDV = oldDV; - - // if ratio is greater than 1, then number of rows increases. This can happen - // when some operators like GROUPBY duplicates the input rows in which case - // number of distincts should not change. Update the distinct count only when - // the output number of rows is less than input number of rows. - if (ratio <= 1.0) { - newDV = Math.round(ratio * oldDV); + if (useColStats) { + List colStats = stats.getColumnStats(); + for (ColStatistics cs : colStats) { + long oldNumNulls = cs.getNumNulls(); + long oldDV = cs.getCountDistint(); + long newNumNulls = Math.round(ratio * oldNumNulls); + long newDV = oldDV; + + // if ratio is greater than 1, then number of rows increases. This can happen + // when some operators like GROUPBY duplicates the input rows in which case + // number of distincts should not change. Update the distinct count only when + // the output number of rows is less than input number of rows. 
+ if (ratio <= 1.0) { + newDV = Math.round(ratio * oldDV); + } + cs.setNumNulls(newNumNulls); + cs.setCountDistint(newDV); } - cs.setNumNulls(newNumNulls); - cs.setCountDistint(newDV); + stats.setColumnStats(colStats); + long newDataSize = StatsUtils.getDataSizeFromColumnStats(newNumRows, colStats); + stats.setDataSize(newDataSize); + } else { + long newDataSize = (long) (ratio * stats.getDataSize()); + stats.setDataSize(newDataSize); } - stats.setColumnStats(colStats); - long newDataSize = StatsUtils.getDataSizeFromColumnStats(newNumRows, colStats); - stats.setDataSize(newDataSize); } static boolean satisfyPrecondition(Statistics stats) { @@ -1074,16 +1137,4 @@ static boolean satisfyPrecondition(Statistics stats) { && !stats.getColumnStatsState().equals(Statistics.State.NONE); } - static Statistics getWorstCaseStats(Statistics stats) throws CloneNotSupportedException { - Statistics wcClone = stats.clone(); - long numRows = wcClone.getNumRows() / 2; - long dataSize = wcClone.getDataSize() / 2; - long avgRowSize = wcClone.getAvgRowSize(); - if (numRows > 0) { - dataSize = avgRowSize * numRows; - } - wcClone.setNumRows(numRows); - wcClone.setDataSize(dataSize); - return wcClone; - } } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java index baa0b46..02a8bdc 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/Statistics.java @@ -115,6 +115,21 @@ public String toString() { return sb.toString(); } + public String extendedToString() { + StringBuilder sb = new StringBuilder(); + sb.append(" numRows: "); + sb.append(numRows); + sb.append(" dataSize: "); + sb.append(dataSize); + sb.append(" basicStatsState: "); + sb.append(basicStatsState); + sb.append(" colStatsState: "); + sb.append(columnStatsState); + sb.append(" colStats: "); + sb.append(columnStats); + return sb.toString(); + } + @Override public Statistics clone() throws CloneNotSupportedException { Statistics clone = new Statistics(numRows, dataSize); @@ -215,7 +230,10 @@ public long getAvgRowSize() { } public ColStatistics getColumnStatisticsFromFQColName(String fqColName) { - return columnStats.get(fqColName); + if (columnStats != null) { + return columnStats.get(fqColName); + } + return null; } public ColStatistics getColumnStatisticsFromColName(String colName) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java index 200ddf6..e89e3a4 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java @@ -3,6 +3,8 @@ import java.util.List; import java.util.Map; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; @@ -62,9 +64,10 @@ public class StatsUtils { + private static final Log LOG = LogFactory.getLog(StatsUtils.class.getName()); + /** * Collect table, partition and column level statistics - * * @param conf * - hive configuration * @param partList @@ -86,26 +89,38 @@ public static Statistics collectStatistics(HiveConf conf, PrunedPartitionList pa List neededColumns = tableScanOperator.getNeededColumns(); String dbName = table.getDbName(); String tabName = table.getTableName(); - boolean fetchColStats = HiveConf.getBoolVar(conf, 
HiveConf.ConfVars.HIVE_STATS_FETCH_COLUMN_STATS); + boolean fetchColStats = + HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_FETCH_COLUMN_STATS); + float deserFactor = + HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVE_STATS_DESERIALIZATION_FACTOR); if (!table.isPartitioned()) { long nr = getNumRows(dbName, tabName); - long rds = getRawDataSize(dbName, tabName); - if (rds <= 0) { - rds = getTotalSize(dbName, tabName); + long ds = getRawDataSize(dbName, tabName); + if (ds <= 0) { + ds = getTotalSize(dbName, tabName); // if data size is still 0 then get file size - if (rds <= 0) { - rds = getFileSizeForTable(conf, table); + if (ds <= 0) { + ds = getFileSizeForTable(conf, table); } + + ds = (long) (ds * deserFactor); } // number of rows -1 means that statistics from metastore is not reliable + // and 0 means statistics gathering is disabled if (nr <= 0) { - nr = 0; + int avgRowSize = estimateRowSizeFromSchema(conf, schema, neededColumns); + if (avgRowSize > 0) { + if (LOG.isDebugEnabled()) { + LOG.debug("Estimated average row size: " + avgRowSize); + } + nr = ds / avgRowSize; + } } stats.setNumRows(nr); - stats.setDataSize(rds); + stats.setDataSize(ds); List colStats = Lists.newArrayList(); if (fetchColStats) { @@ -126,7 +141,7 @@ public static Statistics collectStatistics(HiveConf conf, PrunedPartitionList pa if (!checkIfColStatsAvailable(colStats)) { // if there is column projection and if we do not have stats then mark - // it as NONE. Else we will have stats for const/udf columns + // it as NONE. Else we will have estimated stats for const/udf columns if (!neededColumns.isEmpty()) { stats.setColumnStatsState(Statistics.State.NONE); } else { @@ -144,31 +159,56 @@ public static Statistics collectStatistics(HiveConf conf, PrunedPartitionList pa partNames.add(part.getName()); } - List rowCounts = getBasicStatForPartitions(table, partNames, - StatsSetupConst.ROW_COUNT); - List dataSizes = getBasicStatForPartitions(table, partNames, - StatsSetupConst.RAW_DATA_SIZE); + List rowCounts = + getBasicStatForPartitions(table, partNames, StatsSetupConst.ROW_COUNT); + List dataSizes = + getBasicStatForPartitions(table, partNames, StatsSetupConst.RAW_DATA_SIZE); long nr = getSumIgnoreNegatives(rowCounts); - long rds = getSumIgnoreNegatives(dataSizes); - if (rds <= 0) { + long ds = getSumIgnoreNegatives(dataSizes); + if (ds <= 0) { dataSizes = getBasicStatForPartitions(table, partNames, StatsSetupConst.TOTAL_SIZE); - rds = getSumIgnoreNegatives(dataSizes); + ds = getSumIgnoreNegatives(dataSizes); // if data size still could not be determined, then fall back to filesytem to get file // sizes - if (rds <= 0) { + if (ds <= 0) { dataSizes = getFileSizeForPartitions(conf, partList.getNotDeniedPartns()); } - rds = getSumIgnoreNegatives(dataSizes); + ds = getSumIgnoreNegatives(dataSizes); + + ds = (long) (ds * deserFactor); } - // number of rows -1 means that statistics from metastore is not reliable - if (nr <= 0) { - nr = 0; + int avgRowSize = estimateRowSizeFromSchema(conf, schema, neededColumns); + if (avgRowSize > 0) { + if (LOG.isDebugEnabled()) { + LOG.debug("Estimated average row size: " + avgRowSize); + } + + for (int i = 0; i < rowCounts.size(); i++) { + long rc = rowCounts.get(i); + long s = dataSizes.get(i); + if (rc <= 0 && s > 0) { + rc = s / avgRowSize; + rowCounts.set(i, rc); + } + + if (s <= 0 && rc > 0) { + s = rc * avgRowSize; + dataSizes.set(i, s); + } + } + nr = getSumIgnoreNegatives(rowCounts); + ds = getSumIgnoreNegatives(dataSizes); + + // number of rows -1 means that statistics 
from metastore is not reliable + if (nr <= 0) { + nr = ds / avgRowSize; + } } stats.addToNumRows(nr); - stats.addToDataSize(rds); + stats.addToDataSize(ds); // if atleast a partition does not contain row count then mark basic stats state as PARTIAL if (containsNonPositives(rowCounts)) { @@ -187,7 +227,7 @@ public static Statistics collectStatistics(HiveConf conf, PrunedPartitionList pa stats.updateColumnStatsState(Statistics.State.COMPLETE); } else { // if there is column projection and if we do not have stats then mark - // it as NONE. Else we will have stats for const/udf columns + // it as NONE. Else we will have estimated stats for const/udf columns if (!neededColumns.isEmpty()) { stats.updateColumnStatsState(Statistics.State.NONE); } else { @@ -202,9 +242,40 @@ public static Statistics collectStatistics(HiveConf conf, PrunedPartitionList pa return stats; } + public static int estimateRowSizeFromSchema(HiveConf conf, List schema, + List neededColumns) { + int avgRowSize = 0; + for (String neededCol : neededColumns) { + ColumnInfo ci = getColumnInfoForColumn(neededCol, schema); + ObjectInspector oi = ci.getObjectInspector(); + String colType = ci.getTypeName(); + if (colType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME) + || colType.equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME) + || colType.startsWith(serdeConstants.VARCHAR_TYPE_NAME) + || colType.startsWith(serdeConstants.CHAR_TYPE_NAME) + || colType.startsWith(serdeConstants.LIST_TYPE_NAME) + || colType.startsWith(serdeConstants.MAP_TYPE_NAME) + || colType.startsWith(serdeConstants.STRUCT_TYPE_NAME) + || colType.startsWith(serdeConstants.UNION_TYPE_NAME)) { + avgRowSize += getAvgColLenOfVariableLengthTypes(conf, oi, colType); + } else { + avgRowSize += getAvgColLenOfFixedLengthTypes(colType); + } + } + return avgRowSize; + } + + private static ColumnInfo getColumnInfoForColumn(String neededCol, List schema) { + for (ColumnInfo ci : schema) { + if (ci.getInternalName().equalsIgnoreCase(neededCol)) { + return ci; + } + } + return null; + } + /** * Find the bytes on disk occupied by a table - * * @param conf * - hive conf * @param table @@ -225,7 +296,6 @@ public static long getFileSizeForTable(HiveConf conf, Table table) { /** * Find the bytes on disks occupied by list of partitions - * * @param conf * - hive conf * @param parts @@ -259,7 +329,6 @@ private static boolean containsNonPositives(List vals) { /** * Get sum of all values in the list that are >0 - * * @param vals * - list of values * @return sum @@ -276,7 +345,6 @@ public static long getSumIgnoreNegatives(List vals) { /** * Get the partition level columns statistics from metastore for all the needed columns - * * @param table * - table object * @param part @@ -312,7 +380,6 @@ public static long getSumIgnoreNegatives(List vals) { /** * Get the partition level columns statistics from metastore for a specific column - * * @param dbName * - database name * @param tabName @@ -326,8 +393,8 @@ public static long getSumIgnoreNegatives(List vals) { public static ColStatistics getParitionColumnStatsForColumn(String dbName, String tabName, String partName, String colName) { try { - ColumnStatistics colStats = Hive.get().getPartitionColumnStatistics(dbName, tabName, - partName, colName); + ColumnStatistics colStats = + Hive.get().getPartitionColumnStatistics(dbName, tabName, partName, colName); if (colStats != null) { return getColStatistics(colStats.getStatsObj().get(0), tabName, colName); } @@ -339,7 +406,6 @@ public static ColStatistics getParitionColumnStatsForColumn(String 
dbName, Strin /** * Will return true if column statistics for atleast one column is available - * * @param colStats * - column stats * @return @@ -355,7 +421,6 @@ private static boolean checkIfColStatsAvailable(List colStats) { /** * Get table level column stats for specified column - * * @param dbName * - database name * @param tableName @@ -380,7 +445,6 @@ public static ColStatistics getTableColumnStatsForColumn(String dbName, String t /** * Convert ColumnStatisticsObj to ColStatistics - * * @param cso * - ColumnStatisticsObj * @param tabName @@ -446,7 +510,6 @@ public static ColStatistics getColStatistics(ColumnStatisticsObj cso, String tab /** * Get table level column statistics from metastore for needed columns - * * @param table * - table * @param schema @@ -479,7 +542,6 @@ public static ColStatistics getColStatistics(ColumnStatisticsObj cso, String tab /** * Get the raw data size of variable length data types - * * @param conf * - hive conf * @param oi @@ -533,10 +595,11 @@ public static long getAvgColLenOfVariableLengthTypes(HiveConf conf, ObjectInspec return coi.getWritableConstantValue().toString().length(); } else if (oi instanceof WritableConstantHiveVarcharObjectInspector) { - WritableConstantHiveVarcharObjectInspector wcsoi = (WritableConstantHiveVarcharObjectInspector) oi; + WritableConstantHiveVarcharObjectInspector wcsoi = + (WritableConstantHiveVarcharObjectInspector) oi; return wcsoi.getWritableConstantValue().toString().length(); } else if (oi instanceof WritableHiveVarcharObjectInspector) { - return ((WritableHiveVarcharObjectInspector)oi).getMaxLength(); + return ((WritableHiveVarcharObjectInspector) oi).getMaxLength(); } } else if (colType.startsWith(serdeConstants.CHAR_TYPE_NAME)) { @@ -552,7 +615,8 @@ public static long getAvgColLenOfVariableLengthTypes(HiveConf conf, ObjectInspec return coi.getWritableConstantValue().toString().length(); } else if (oi instanceof WritableConstantHiveCharObjectInspector) { - WritableConstantHiveCharObjectInspector wcsoi = (WritableConstantHiveCharObjectInspector) oi; + WritableConstantHiveCharObjectInspector wcsoi = + (WritableConstantHiveCharObjectInspector) oi; return wcsoi.getWritableConstantValue().toString().length(); } else if (oi instanceof WritableHiveCharObjectInspector) { return ((WritableHiveCharObjectInspector) oi).getMaxLength(); @@ -592,7 +656,6 @@ public static long getAvgColLenOfVariableLengthTypes(HiveConf conf, ObjectInspec /** * Get the size of complex data types - * * @param conf * - hive conf * @param oi @@ -690,7 +753,6 @@ public static long getSizeOfComplexTypes(HiveConf conf, ObjectInspector oi) { /** * Get size of fixed length primitives - * * @param colType * - column type * @return raw data size @@ -718,7 +780,6 @@ public static long getAvgColLenOfFixedLengthTypes(String colType) { /** * Get the size of arrays of primitive types - * * @param colType * - column type * @param length @@ -752,7 +813,6 @@ public static long getSizeOfPrimitiveTypeArraysFromType(String colType, int leng /** * Estimate the size of map object - * * @param scmoi * - object inspector * @return size of map @@ -774,7 +834,6 @@ public static long getSizeOfMap(StandardConstantMapObjectInspector scmoi) { /** * Get size of primitive data types based on their respective writable object inspector - * * @param oi * - object inspector * @param value @@ -817,7 +876,6 @@ public static long getWritableSize(ObjectInspector oi, Object value) { /** * Get column statistics from parent statistics. 
- * * @param conf * - hive conf * @param parentStats @@ -829,8 +887,7 @@ public static long getWritableSize(ObjectInspector oi, Object value) { * @return column statistics */ public static List getColStatisticsFromExprMap(HiveConf conf, - Statistics parentStats, - Map colExprMap, RowSchema rowSchema) { + Statistics parentStats, Map colExprMap, RowSchema rowSchema) { List cs = Lists.newArrayList(); if (colExprMap != null) { @@ -856,7 +913,6 @@ public static long getWritableSize(ObjectInspector oi, Object value) { /** * Get column statistics expression nodes - * * @param conf * - hive conf * @param parentStats @@ -963,7 +1019,6 @@ public static ColStatistics getColStatisticsFromExpression(HiveConf conf, Statis /** * Get number of rows of a give table - * * @param dbName * - database name * @param tabName @@ -976,7 +1031,6 @@ public static long getNumRows(String dbName, String tabName) { /** * Get raw data size of a give table - * * @param dbName * - database name * @param tabName @@ -989,7 +1043,6 @@ public static long getRawDataSize(String dbName, String tabName) { /** * Get total size of a give table - * * @param dbName * - database name * @param tabName @@ -1002,7 +1055,6 @@ public static long getTotalSize(String dbName, String tabName) { /** * Get basic stats of table - * * @param dbName * - database name * @param tabName @@ -1035,7 +1087,6 @@ public static long getBasicStatForTable(String dbName, String tabName, String st /** * Get basic stats of partitions - * * @param table * - table * @param partNames @@ -1072,7 +1123,6 @@ public static long getBasicStatForTable(String dbName, String tabName, String st /** * Compute raw data size from column statistics - * * @param numRows * - number of rows * @param colStats @@ -1130,7 +1180,6 @@ public static long getDataSizeFromColumnStats(long numRows, List /** * Remove KEY/VALUE prefix from column name - * * @param colName * - column name * @return column name @@ -1146,7 +1195,6 @@ public static String stripPrefixFromColumnName(String colName) { /** * Returns fully qualified name of column - * * @param tabName * @param colName * @return @@ -1157,7 +1205,6 @@ public static String getFullyQualifiedColumnName(String tabName, String colName) /** * Returns fully qualified name of column - * * @param dbName * @param tabName * @param colName @@ -1169,7 +1216,6 @@ public static String getFullyQualifiedColumnName(String dbName, String tabName, /** * Returns fully qualified name of column - * * @param dbName * @param tabName * @param partName @@ -1193,7 +1239,6 @@ private static String getFullyQualifiedName(String... names) { /** * Try to get fully qualified column name from expression node - * * @param keyExprs * - expression nodes * @param map diff --git a/ql/src/test/queries/clientpositive/annotate_stats_filter.q b/ql/src/test/queries/clientpositive/annotate_stats_filter.q index 3f2452e..ec973e1 100644 --- a/ql/src/test/queries/clientpositive/annotate_stats_filter.q +++ b/ql/src/test/queries/clientpositive/annotate_stats_filter.q @@ -27,7 +27,7 @@ analyze table loc_orc compute statistics for columns state,locid,zip,year; -- numRows: 1 rawDataSize: 102 explain extended select * from loc_orc where state='OH'; --- not equals comparison shouldn't affect number of rows. rawDataSize is 792 and not 796 because of rounding off issue with avgColLen. avgColLen uses integers and not double. 
+-- not equals comparison shouldn't affect number of rows -- numRows: 8 rawDataSize: 804 explain extended select * from loc_orc where state!='OH'; explain extended select * from loc_orc where state<>'OH'; diff --git a/ql/src/test/queries/clientpositive/annotate_stats_part.q b/ql/src/test/queries/clientpositive/annotate_stats_part.q index 257b840..83510e3 100644 --- a/ql/src/test/queries/clientpositive/annotate_stats_part.q +++ b/ql/src/test/queries/clientpositive/annotate_stats_part.q @@ -77,3 +77,9 @@ explain extended select state,locid from loc_orc where year!=2001; -- basicStatState: COMPLETE colStatState: PARTIAL explain extended select * from loc_orc; + +-- This is to test filter expression evaluation on partition column +-- numRows: 2 dataSize: 8 basicStatState: COMPLETE colStatState: COMPLETE +explain extended select locid from loc_orc where locid>0 and year=2001; +explain extended select locid,year from loc_orc where locid>0 and year=2001; +explain extended select * from (select locid,year from loc_orc) test where locid>0 and year=2001; diff --git a/ql/src/test/results/clientpositive/annotate_stats_filter.q.out b/ql/src/test/results/clientpositive/annotate_stats_filter.q.out index 1fcaa99..294d2ba 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_filter.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_filter.q.out @@ -121,7 +121,7 @@ STAGE PLANS: expr: (state = 'OH') type: boolean Statistics: - numRows: 4 dataSize: 396 basicStatsState: COMPLETE colStatsState: NONE + numRows: 4 dataSize: 398 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: state @@ -134,14 +134,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 4 dataSize: 396 basicStatsState: COMPLETE colStatsState: NONE + numRows: 4 dataSize: 398 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 4 dataSize: 396 basicStatsState: COMPLETE colStatsState: NONE + numRows: 4 dataSize: 398 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -350,11 +350,11 @@ STAGE PLANS: limit: -1 -PREHOOK: query: -- not equals comparison shouldn't affect number of rows. rawDataSize is 792 and not 796 because of rounding off issue with avgColLen. avgColLen uses integers and not double. +PREHOOK: query: -- not equals comparison shouldn't affect number of rows -- numRows: 8 rawDataSize: 804 explain extended select * from loc_orc where state!='OH' PREHOOK: type: QUERY -POSTHOOK: query: -- not equals comparison shouldn't affect number of rows. rawDataSize is 792 and not 796 because of rounding off issue with avgColLen. avgColLen uses integers and not double. 
+POSTHOOK: query: -- not equals comparison shouldn't affect number of rows -- numRows: 8 rawDataSize: 804 explain extended select * from loc_orc where state!='OH' POSTHOOK: type: QUERY @@ -1509,7 +1509,7 @@ STAGE PLANS: expr: ((year = 2001) and year is null) type: boolean Statistics: - numRows: 2 dataSize: 204 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 8 dataSize: 804 basicStatsState: COMPLETE colStatsState: COMPLETE Select Operator expressions: expr: state @@ -1522,14 +1522,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 2 dataSize: 204 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 8 dataSize: 804 basicStatsState: COMPLETE colStatsState: COMPLETE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 2 dataSize: 204 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 8 dataSize: 804 basicStatsState: COMPLETE colStatsState: COMPLETE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1759,7 +1759,7 @@ STAGE PLANS: expr: (((year = 2001) and year is null) or (state = 'CA')) type: boolean Statistics: - numRows: 3 dataSize: 306 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 8 dataSize: 796 basicStatsState: COMPLETE colStatsState: COMPLETE Select Operator expressions: expr: state @@ -1772,14 +1772,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 3 dataSize: 306 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 8 dataSize: 796 basicStatsState: COMPLETE colStatsState: COMPLETE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 3 dataSize: 306 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 8 dataSize: 796 basicStatsState: COMPLETE colStatsState: COMPLETE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1885,7 +1885,7 @@ STAGE PLANS: expr: (((year = 2001) or year is null) and (state = 'CA')) type: boolean Statistics: - numRows: 1 dataSize: 102 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 2 dataSize: 204 basicStatsState: COMPLETE colStatsState: COMPLETE Select Operator expressions: expr: state @@ -1898,14 +1898,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 1 dataSize: 102 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 2 dataSize: 204 basicStatsState: COMPLETE colStatsState: COMPLETE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 1 dataSize: 102 basicStatsState: COMPLETE colStatsState: COMPLETE + numRows: 2 dataSize: 204 basicStatsState: COMPLETE colStatsState: COMPLETE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/annotate_stats_part.q.out b/ql/src/test/results/clientpositive/annotate_stats_part.q.out index 333b8d6..70f279b 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_part.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_part.q.out @@ -194,7 +194,7 @@ STAGE PLANS: TableScan alias: loc_orc Statistics: - numRows: 0 dataSize: 727 basicStatsState: PARTIAL colStatsState: NONE + numRows: 5 dataSize: 727 basicStatsState: COMPLETE colStatsState: NONE GatherStats: 
false Select Operator expressions: @@ -208,7 +208,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 727 basicStatsState: PARTIAL colStatsState: NONE + numRows: 5 dataSize: 727 basicStatsState: COMPLETE colStatsState: NONE ListSink @@ -298,7 +298,7 @@ STAGE PLANS: TableScan alias: loc_orc Statistics: - numRows: 0 dataSize: 325 basicStatsState: PARTIAL colStatsState: NONE + numRows: 2 dataSize: 325 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -312,7 +312,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 325 basicStatsState: PARTIAL colStatsState: NONE + numRows: 2 dataSize: 325 basicStatsState: COMPLETE colStatsState: NONE ListSink @@ -421,7 +421,7 @@ STAGE PLANS: TableScan alias: loc_orc Statistics: - numRows: 7 dataSize: 727 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 727 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -435,7 +435,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 7 dataSize: 727 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 727 basicStatsState: COMPLETE colStatsState: NONE ListSink @@ -1555,7 +1555,7 @@ STAGE PLANS: expr: (year <> 2001) type: boolean Statistics: - numRows: 0 dataSize: 162 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 325 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: state @@ -1564,14 +1564,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 162 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 325 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 162 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 325 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1762,3 +1762,356 @@ STAGE PLANS: ListSink +PREHOOK: query: -- This is to test filter expression evaluation on partition column +-- numRows: 2 dataSize: 8 basicStatState: COMPLETE colStatState: COMPLETE +explain extended select locid from loc_orc where locid>0 and year=2001 +PREHOOK: type: QUERY +POSTHOOK: query: -- This is to test filter expression evaluation on partition column +-- numRows: 2 dataSize: 8 basicStatState: COMPLETE colStatState: COMPLETE +explain extended select locid from loc_orc where locid>0 and year=2001 +POSTHOOK: type: QUERY +POSTHOOK: Lineage: loc_orc PARTITION(year=2001).locid SIMPLE [(loc_staging)loc_staging.FieldSchema(name:locid, type:int, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=2001).state SIMPLE [(loc_staging)loc_staging.FieldSchema(name:state, type:string, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=2001).zip SIMPLE [(loc_staging)loc_staging.FieldSchema(name:zip, type:bigint, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=__HIVE_DEFAULT_PARTITION__).locid SIMPLE [(loc_staging)loc_staging.FieldSchema(name:locid, type:int, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=__HIVE_DEFAULT_PARTITION__).state SIMPLE [(loc_staging)loc_staging.FieldSchema(name:state, type:string, comment:null), ] +POSTHOOK: Lineage: loc_orc 
PARTITION(year=__HIVE_DEFAULT_PARTITION__).zip SIMPLE [(loc_staging)loc_staging.FieldSchema(name:zip, type:bigint, comment:null), ] +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME loc_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL locid))) (TOK_WHERE (and (> (TOK_TABLE_OR_COL locid) 0) (= (TOK_TABLE_OR_COL year) 2001))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + loc_orc + TableScan + alias: loc_orc + Statistics: + numRows: 7 dataSize: 402 basicStatsState: COMPLETE colStatsState: COMPLETE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: + expr: (locid > 0) + type: boolean + Statistics: + numRows: 2 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE + Select Operator + expressions: + expr: locid + type: int + outputColumnNames: _col0 + Statistics: + numRows: 2 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: + numRows: 2 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0 + columns.types int + escape.delim \ + hive.serialization.extend.nesting.levels true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: year=2001 + input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + partition values: + year 2001 + properties: + COLUMN_STATS_ACCURATE true + bucket_count -1 + columns state,locid,zip + columns.types string:int:bigint +#### A masked pattern was here #### + name default.loc_orc + numFiles 1 + numRows 7 + partition_columns year + rawDataSize 0 + serialization.ddl struct loc_orc { string state, i32 locid, i64 zip} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 402 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + + input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + properties: + bucket_count -1 + columns state,locid,zip + columns.types string:int:bigint +#### A masked pattern was here #### + name default.loc_orc + partition_columns year + serialization.ddl struct loc_orc { string state, i32 locid, i64 zip} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + name: default.loc_orc + name: default.loc_orc + Truncated Path -> Alias: + /loc_orc/year=2001 [loc_orc] + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: explain extended select locid,year from loc_orc where locid>0 and year=2001 +PREHOOK: type: QUERY +POSTHOOK: query: explain extended select locid,year from loc_orc where locid>0 and year=2001 +POSTHOOK: type: QUERY 
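Illustrative note (not part of the patch): in the plan above, partition pruning leaves only the year=2001 partition, so the TableScan starts at numRows 7, and the remaining predicate (locid > 0) brings that down to the numRows 2 / dataSize 8 expected by the test comment. The sketch below only reproduces that arithmetic; the class name, the one-third selectivity assumed for the range predicate, and the 4-byte width assumed for the int column locid are illustrative choices that happen to match the numbers shown, not values confirmed by this patch.

```java
// Illustrative arithmetic only; not code from this patch.
public class PartitionFilterEstimateSketch {
  public static void main(String[] args) {
    long partitionRows = 7;              // rows in the single surviving partition (year=2001)
    double rangeSelectivity = 1.0 / 3.0; // assumed selectivity for the "locid > 0" predicate
    long filteredRows = (long) (partitionRows * rangeSelectivity); // 7 * 1/3 -> 2
    long intWidthBytes = 4;              // assumed width of the projected int column "locid"
    long projectedSize = filteredRows * intWidthBytes;             // 2 * 4   -> 8
    System.out.println("numRows=" + filteredRows + " dataSize=" + projectedSize);
  }
}
```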
+POSTHOOK: Lineage: loc_orc PARTITION(year=2001).locid SIMPLE [(loc_staging)loc_staging.FieldSchema(name:locid, type:int, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=2001).state SIMPLE [(loc_staging)loc_staging.FieldSchema(name:state, type:string, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=2001).zip SIMPLE [(loc_staging)loc_staging.FieldSchema(name:zip, type:bigint, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=__HIVE_DEFAULT_PARTITION__).locid SIMPLE [(loc_staging)loc_staging.FieldSchema(name:locid, type:int, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=__HIVE_DEFAULT_PARTITION__).state SIMPLE [(loc_staging)loc_staging.FieldSchema(name:state, type:string, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=__HIVE_DEFAULT_PARTITION__).zip SIMPLE [(loc_staging)loc_staging.FieldSchema(name:zip, type:bigint, comment:null), ] +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME loc_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL locid)) (TOK_SELEXPR (TOK_TABLE_OR_COL year))) (TOK_WHERE (and (> (TOK_TABLE_OR_COL locid) 0) (= (TOK_TABLE_OR_COL year) 2001))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + loc_orc + TableScan + alias: loc_orc + Statistics: + numRows: 7 dataSize: 402 basicStatsState: COMPLETE colStatsState: COMPLETE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: + expr: (locid > 0) + type: boolean + Statistics: + numRows: 2 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE + Select Operator + expressions: + expr: locid + type: int + expr: year + type: string + outputColumnNames: _col0, _col1 + Statistics: + numRows: 2 dataSize: 376 basicStatsState: COMPLETE colStatsState: COMPLETE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: + numRows: 2 dataSize: 376 basicStatsState: COMPLETE colStatsState: COMPLETE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types int:string + escape.delim \ + hive.serialization.extend.nesting.levels true + serialization.format 1 + serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: year=2001 + input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + partition values: + year 2001 + properties: + COLUMN_STATS_ACCURATE true + bucket_count -1 + columns state,locid,zip + columns.types string:int:bigint +#### A masked pattern was here #### + name default.loc_orc + numFiles 1 + numRows 7 + partition_columns year + rawDataSize 0 + serialization.ddl struct loc_orc { string state, i32 locid, i64 zip} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 402 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + + input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + properties: + bucket_count -1 + columns state,locid,zip + columns.types string:int:bigint +#### A masked pattern was here #### + name default.loc_orc + partition_columns year + serialization.ddl struct loc_orc { string state, i32 locid, i64 zip} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + name: default.loc_orc + name: default.loc_orc + Truncated Path -> Alias: + /loc_orc/year=2001 [loc_orc] + + Stage: Stage-0 + Fetch Operator + limit: -1 + + +PREHOOK: query: explain extended select * from (select locid,year from loc_orc) test where locid>0 and year=2001 +PREHOOK: type: QUERY +POSTHOOK: query: explain extended select * from (select locid,year from loc_orc) test where locid>0 and year=2001 +POSTHOOK: type: QUERY +POSTHOOK: Lineage: loc_orc PARTITION(year=2001).locid SIMPLE [(loc_staging)loc_staging.FieldSchema(name:locid, type:int, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=2001).state SIMPLE [(loc_staging)loc_staging.FieldSchema(name:state, type:string, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=2001).zip SIMPLE [(loc_staging)loc_staging.FieldSchema(name:zip, type:bigint, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=__HIVE_DEFAULT_PARTITION__).locid SIMPLE [(loc_staging)loc_staging.FieldSchema(name:locid, type:int, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=__HIVE_DEFAULT_PARTITION__).state SIMPLE [(loc_staging)loc_staging.FieldSchema(name:state, type:string, comment:null), ] +POSTHOOK: Lineage: loc_orc PARTITION(year=__HIVE_DEFAULT_PARTITION__).zip SIMPLE [(loc_staging)loc_staging.FieldSchema(name:zip, type:bigint, comment:null), ] +ABSTRACT SYNTAX TREE: + (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME loc_orc))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL locid)) (TOK_SELEXPR (TOK_TABLE_OR_COL year))))) test)) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR TOK_ALLCOLREF)) (TOK_WHERE (and (> (TOK_TABLE_OR_COL locid) 0) (= (TOK_TABLE_OR_COL year) 2001))))) + +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Alias -> Map Operator Tree: + test:loc_orc + TableScan + alias: loc_orc + Statistics: + numRows: 7 dataSize: 402 basicStatsState: COMPLETE colStatsState: COMPLETE + GatherStats: false + Filter Operator + isSamplingPred: false + predicate: + expr: (locid > 0) + type: boolean + Statistics: + numRows: 2 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE + Select Operator + expressions: + expr: locid + type: int + expr: year + type: string + outputColumnNames: _col0, _col1 + Statistics: + numRows: 2 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE + File Output Operator + compressed: false + GlobalTableId: 0 +#### A masked pattern was here #### + NumFilesPerFileSink: 1 + Statistics: + numRows: 2 dataSize: 8 basicStatsState: COMPLETE colStatsState: COMPLETE +#### A masked pattern was here #### + table: + input format: org.apache.hadoop.mapred.TextInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + properties: + columns _col0,_col1 + columns.types int:string + escape.delim \ + hive.serialization.extend.nesting.levels true + serialization.format 1 + serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TotalFiles: 1 + GatherStats: false + MultiFileSpray: false + Path -> Alias: +#### A masked pattern was here #### + Path -> Partition: +#### A masked pattern was here #### + Partition + base file name: year=2001 + input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + partition values: + year 2001 + properties: + COLUMN_STATS_ACCURATE true + bucket_count -1 + columns state,locid,zip + columns.types string:int:bigint +#### A masked pattern was here #### + name default.loc_orc + numFiles 1 + numRows 7 + partition_columns year + rawDataSize 0 + serialization.ddl struct loc_orc { string state, i32 locid, i64 zip} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 402 +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + + input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat + output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat + properties: + bucket_count -1 + columns state,locid,zip + columns.types string:int:bigint +#### A masked pattern was here #### + name default.loc_orc + partition_columns year + serialization.ddl struct loc_orc { string state, i32 locid, i64 zip} + serialization.format 1 + serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde +#### A masked pattern was here #### + serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde + name: default.loc_orc + name: default.loc_orc + Truncated Path -> Alias: + /loc_orc/year=2001 [test:loc_orc] + + Stage: Stage-0 + Fetch Operator + limit: -1 + + diff --git a/ql/src/test/results/clientpositive/annotate_stats_table.q.out b/ql/src/test/results/clientpositive/annotate_stats_table.q.out index 64e45c7..1dff909 100644 --- a/ql/src/test/results/clientpositive/annotate_stats_table.q.out +++ b/ql/src/test/results/clientpositive/annotate_stats_table.q.out @@ -98,7 +98,7 @@ STAGE PLANS: TableScan alias: emp_orc Statistics: - numRows: 0 dataSize: 349 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 349 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -108,7 +108,7 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 349 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 349 basicStatsState: COMPLETE colStatsState: NONE ListSink diff --git a/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out b/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out index 68b77da..9638983 100644 --- a/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out +++ b/ql/src/test/results/clientpositive/auto_join_reordering_values.q.out @@ -210,7 +210,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col3, _col4, _col8 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -244,7 +244,7 @@ STAGE PLANS: expr: _col0 type: int Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: _col8 @@ -352,7 +352,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col1, _col10, _col11, _col14 Statistics: - numRows: 0 dataSize: 0 
basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 42 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -386,7 +386,7 @@ STAGE PLANS: expr: _col10 type: int Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 42 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: _col14 @@ -491,7 +491,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col1, _col7, _col18 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 46 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -525,7 +525,7 @@ STAGE PLANS: expr: _col18 type: int Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 46 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: _col7 @@ -628,7 +628,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col1, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 110 dataSize: 316 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col1 @@ -637,17 +637,17 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 110 dataSize: 316 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 5 dataSize: 10 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 5 dataSize: 10 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out index cdec74e..3dd1ec0 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out @@ -98,7 +98,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -293,7 +293,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -1075,7 +1075,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out index df97bdd..fa5183d 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out @@ -142,7 +142,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 
0 dataSize: 114 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -161,7 +161,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -176,7 +176,7 @@ STAGE PLANS: Position of Big Table: 1 Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -467,7 +467,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 114 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -486,7 +486,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -501,7 +501,7 @@ STAGE PLANS: Position of Big Table: 1 Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -785,7 +785,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 114 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -811,7 +811,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -825,11 +825,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out index fdcdad9..998510d 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out @@ -265,7 +265,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 114 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 114 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -282,7 +282,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 170 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 170 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -318,7 +318,7 @@ STAGE PLANS: TableScan alias: c Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL 
colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -347,7 +347,7 @@ STAGE PLANS: Position of Big Table: 0 Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 280 dataSize: 28129 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out index 5b1bbcf..02bbfca 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out @@ -82,7 +82,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -866,7 +866,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out index 50a382a..b435b2e 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out @@ -82,7 +82,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -232,7 +232,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -966,7 +966,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out index 1495d2f..a63632e 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out @@ -94,7 +94,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -244,7 +244,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -978,7 +978,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE 
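Illustrative note (not part of the patch): the map-join hunks above take inputs of 116 rows / 11624 bytes and 1 row / 114 bytes, and the join output is now annotated with numRows 127 / dataSize 12786 instead of 0 / 0. Those numbers match the larger input scaled by 1.1, the default of the new hive.stats.join.factor property; the check below only replays that multiplication, and the class and variable names are invented for the example.

```java
// Sanity check of the two-way join estimate shown above; not code from this patch.
public class JoinEstimateCheck {
  public static void main(String[] args) {
    long largerInputRows = 116;   // row count of the bigger join input (alias b)
    long largerInputSize = 11624; // dataSize of the bigger join input
    float joinFactor = 1.1f;      // factor assumed to apply when column statistics are missing
    long estimatedRows = (long) (largerInputRows * joinFactor); // 116   * 1.1 -> 127
    long estimatedSize = (long) (largerInputSize * joinFactor); // 11624 * 1.1 -> 12786
    System.out.println("numRows=" + estimatedRows + " dataSize=" + estimatedSize);
  }
}
```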
GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out index fa731bc..95a3c40 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out @@ -67,7 +67,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -213,7 +213,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -721,7 +721,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out index 1c0020f..031f4a8 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out @@ -107,7 +107,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -304,7 +304,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -1217,7 +1217,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out index a8e60c8..e963fb5 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out @@ -107,7 +107,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -304,7 +304,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: @@ -1219,7 +1219,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator 
condition map: diff --git a/ql/src/test/results/clientpositive/binary_output_format.q.out b/ql/src/test/results/clientpositive/binary_output_format.q.out index 51a3983..95a0213 100644 --- a/ql/src/test/results/clientpositive/binary_output_format.q.out +++ b/ql/src/test/results/clientpositive/binary_output_format.q.out @@ -70,7 +70,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -80,7 +80,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Transform Operator command: cat output info: @@ -95,14 +95,14 @@ STAGE PLANS: serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucket1.q.out b/ql/src/test/results/clientpositive/bucket1.q.out index bfce6d5..6555dd6 100644 --- a/ql/src/test/results/clientpositive/bucket1.q.out +++ b/ql/src/test/results/clientpositive/bucket1.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -37,14 +37,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator sort order: Map-reduce partition columns: expr: UDFToInteger(_col0) type: int Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -102,7 +102,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -111,14 +111,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git 
a/ql/src/test/results/clientpositive/bucket2.q.out b/ql/src/test/results/clientpositive/bucket2.q.out index 95a3a7a..7778d25 100644 --- a/ql/src/test/results/clientpositive/bucket2.q.out +++ b/ql/src/test/results/clientpositive/bucket2.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -37,14 +37,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator sort order: Map-reduce partition columns: expr: UDFToInteger(_col0) type: int Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -102,7 +102,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -111,14 +111,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucket3.q.out b/ql/src/test/results/clientpositive/bucket3.q.out index bdbbf23..e38df6e 100644 --- a/ql/src/test/results/clientpositive/bucket3.q.out +++ b/ql/src/test/results/clientpositive/bucket3.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -37,14 +37,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator sort order: Map-reduce partition columns: expr: UDFToInteger(_col0) type: int Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -102,7 +102,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -111,7 +111,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 
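Illustrative note (not part of the patch): in the scans above, tables that previously reported numRows 0 with basicStatsState PARTIAL now get an estimated row count and flip to COMPLETE, for example the 5812-byte scans of alias src become 29 rows. That is consistent with dividing the file size by a per-row width derived from the column types (about 200 bytes here); elsewhere in this patch the same 5812 bytes yields a different row count for a table with a different schema, which fits a schema-derived width rather than a fixed constant. The sketch below only reproduces the division; the 200-byte width and the class name are assumptions for illustration.

```java
// Illustrative only; not code from this patch.
public class RowCountFromSizeSketch {
  public static void main(String[] args) {
    long totalFileSize = 5812;  // totalSize recorded for the scanned table (alias src)
    long avgRowSizeBytes = 200; // assumed average row width derived from the schema
    long estimatedRows = totalFileSize / avgRowSizeBytes; // 5812 / 200 -> 29
    System.out.println("numRows=" + estimatedRows);
  }
}
```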
@@ -119,7 +119,7 @@ STAGE PLANS: NumFilesPerFileSink: 2 Static Partition Specification: ds=1/ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucket4.q.out b/ql/src/test/results/clientpositive/bucket4.q.out index f8e48a2..2b47edb 100644 --- a/ql/src/test/results/clientpositive/bucket4.q.out +++ b/ql/src/test/results/clientpositive/bucket4.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -37,7 +37,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: UDFToInteger(_col0) @@ -47,7 +47,7 @@ STAGE PLANS: expr: UDFToInteger(_col0) type: int Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -105,7 +105,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -114,14 +114,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucket5.q.out b/ql/src/test/results/clientpositive/bucket5.q.out index 391ad61..f3620f6 100644 --- a/ql/src/test/results/clientpositive/bucket5.q.out +++ b/ql/src/test/results/clientpositive/bucket5.q.out @@ -48,7 +48,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -58,7 +58,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: UDFToInteger(_col0) @@ -68,7 +68,7 @@ STAGE PLANS: expr: UDFToInteger(_col0) type: int Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -83,7 +83,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL 
colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -152,7 +152,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -161,14 +161,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -234,7 +234,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -270,7 +270,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -279,14 +279,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 2 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out index b373098..45883f7 100644 --- a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out +++ b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out @@ -99,10 +99,10 @@ STAGE PLANS: 1 [Column[key], Column[value]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 0 dataSize: 23 basicStatsState: PARTIAL colStatsState: NONE Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 0 dataSize: 23 basicStatsState: PARTIAL colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out index 02e002c..f3210b1 100644 --- a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out +++ b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out @@ -99,10 +99,10 @@ STAGE PLANS: 1 [Column[key], Column[value]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 0 dataSize: 23 basicStatsState: PARTIAL colStatsState: NONE Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE 
colStatsState: NONE + numRows: 0 dataSize: 23 basicStatsState: PARTIAL colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/bucketcontext_1.q.out b/ql/src/test/results/clientpositive/bucketcontext_1.q.out index 0c310fc..30533d9 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_1.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_1.q.out @@ -140,7 +140,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -166,7 +166,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -180,11 +180,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -378,7 +378,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/bucketcontext_2.q.out b/ql/src/test/results/clientpositive/bucketcontext_2.q.out index 3ac151a..67a7809 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_2.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_2.q.out @@ -128,7 +128,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -154,7 +154,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -168,11 +168,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -366,7 +366,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/bucketcontext_3.q.out b/ql/src/test/results/clientpositive/bucketcontext_3.q.out index ca49323..a553447 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_3.q.out +++ 
b/ql/src/test/results/clientpositive/bucketcontext_3.q.out @@ -170,7 +170,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -196,7 +196,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -210,11 +210,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -363,7 +363,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/bucketcontext_4.q.out b/ql/src/test/results/clientpositive/bucketcontext_4.q.out index 65efd15..0389011 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_4.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_4.q.out @@ -182,7 +182,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -208,7 +208,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -222,11 +222,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -375,7 +375,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/bucketcontext_5.q.out b/ql/src/test/results/clientpositive/bucketcontext_5.q.out index 04dda46..f9ebcb6 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_5.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_5.q.out @@ -70,7 +70,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -96,7 +96,7 @@ STAGE PLANS: TableScan alias: b 
Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -110,11 +110,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -259,7 +259,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/bucketcontext_6.q.out b/ql/src/test/results/clientpositive/bucketcontext_6.q.out index b263c4c..a9ab456 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_6.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_6.q.out @@ -84,7 +84,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -110,7 +110,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -124,11 +124,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -320,7 +320,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/bucketcontext_7.q.out b/ql/src/test/results/clientpositive/bucketcontext_7.q.out index bbe3973..172973f 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_7.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_7.q.out @@ -195,7 +195,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -221,7 +221,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -235,11 +235,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 
basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -435,7 +435,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/bucketcontext_8.q.out b/ql/src/test/results/clientpositive/bucketcontext_8.q.out index 856e900..51a82c9 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_8.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_8.q.out @@ -195,7 +195,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5500 basicStatsState: PARTIAL colStatsState: NONE + numRows: 54 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -221,7 +221,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -235,11 +235,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -435,7 +435,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Sorted Merge Bucket Map Join Operator condition map: diff --git a/ql/src/test/results/clientpositive/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/bucketmapjoin1.q.out index a477a5d..8e4b76d 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin1.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin1.q.out @@ -426,7 +426,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -452,7 +452,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -467,7 +467,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -479,14 +479,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: 
false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -927,7 +927,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -953,7 +953,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -968,7 +968,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -980,14 +980,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out index 4797b06..45050bb 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out @@ -217,7 +217,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 6950 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1737 dataSize: 6950 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -236,7 +236,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 6950 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1737 dataSize: 6950 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -250,10 +250,10 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1910 dataSize: 7645 basicStatsState: COMPLETE colStatsState: NONE Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1910 dataSize: 7645 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out index 0ca326e..1938552 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out @@ -223,7 +223,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 8562 basicStatsState: PARTIAL colStatsState: NONE + numRows: 2140 dataSize: 8562 basicStatsState: COMPLETE 
colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -249,7 +249,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 8562 basicStatsState: PARTIAL colStatsState: NONE + numRows: 2140 dataSize: 8562 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -263,11 +263,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 2354 dataSize: 9418 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 2354 dataSize: 9418 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -559,7 +559,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 8562 basicStatsState: PARTIAL colStatsState: NONE + numRows: 2140 dataSize: 8562 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -585,7 +585,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 8562 basicStatsState: PARTIAL colStatsState: NONE + numRows: 2140 dataSize: 8562 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -599,11 +599,11 @@ STAGE PLANS: 1 [Column[key], Column[part]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 2354 dataSize: 9418 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 2354 dataSize: 9418 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out index 2bfaa87..0ff3294 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out @@ -149,7 +149,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -175,7 +175,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -189,11 +189,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -399,7 +399,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -418,7 +418,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL 
colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -432,10 +432,10 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/bucketmapjoin13.q.out b/ql/src/test/results/clientpositive/bucketmapjoin13.q.out index 25efe6a..b632c0b 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin13.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin13.q.out @@ -189,10 +189,10 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 250000 dataSize: 5000000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 1100 dataSize: 11686 basicStatsState: COMPLETE colStatsState: NONE Select Operator Statistics: - numRows: 250000 dataSize: 5000000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 1100 dataSize: 11686 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -497,11 +497,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -774,11 +774,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -1053,11 +1053,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin2.q.out index 3f9744f..4c18bbd 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin2.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin2.q.out @@ -141,7 +141,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 3062 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 3062 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -167,7 +167,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE 
+ numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -182,7 +182,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col6 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -194,14 +194,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -687,7 +687,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -713,7 +713,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 3062 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 3062 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -728,7 +728,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col6 Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -740,14 +740,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1473,7 +1473,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 6124 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 6124 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -1499,7 +1499,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -1514,7 +1514,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col6 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -1526,14 +1526,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: 
COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin3.q.out index 45b911e..1560192 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin3.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin3.q.out @@ -158,7 +158,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -184,7 +184,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 3062 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 3062 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -199,7 +199,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col6 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -211,14 +211,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -704,7 +704,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 3062 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 3062 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -730,7 +730,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -745,7 +745,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col6 Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -757,14 +757,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: 
org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/bucketmapjoin4.q.out index 81f6117..d1ffcb8 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin4.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin4.q.out @@ -116,7 +116,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -142,7 +142,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -157,7 +157,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 28 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -169,14 +169,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 28 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 28 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -609,7 +609,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -635,7 +635,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -650,7 +650,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 28 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -662,14 +662,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 28 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 28 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out index a63c058..d3b16cc 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out @@ -154,7 +154,7 @@ STAGE PLANS: TableScan alias: a 
Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -180,7 +180,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 110 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -195,7 +195,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 121 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -207,14 +207,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 121 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 121 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -702,7 +702,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -728,7 +728,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 6124 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 6124 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -743,7 +743,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -755,14 +755,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucketmapjoin7.q.out b/ql/src/test/results/clientpositive/bucketmapjoin7.q.out index 0ffbd85..e388787 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin7.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin7.q.out @@ -115,7 +115,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -141,7 +141,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + 
numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -156,7 +156,7 @@ STAGE PLANS: outputColumnNames: _col0, _col7 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -166,17 +166,17 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 4 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 4 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out index 9deb27e..9ef3a9e 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out @@ -122,7 +122,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -148,7 +148,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -162,11 +162,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -381,7 +381,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -407,7 +407,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -421,11 +421,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out 
b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out index 9a85e10..13da240 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out @@ -128,7 +128,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1050 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -147,7 +147,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -161,10 +161,10 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1155 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1155 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() @@ -408,7 +408,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -427,7 +427,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 687 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -441,10 +441,10 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 755 dataSize: 3025 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out index 0327666..991fee4 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out @@ -124,7 +124,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 40 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -143,7 +143,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -158,7 +158,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 44 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -169,14 +169,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 44 dataSize: 4620 
basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 44 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out index 6fb0ed8..ec9bdbe 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out @@ -172,7 +172,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 6124 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 6124 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -198,7 +198,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -213,7 +213,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -225,14 +225,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6736 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out index bdefd50..ba13809 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out @@ -132,7 +132,7 @@ STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -158,7 +158,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -173,7 +173,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -187,14 +187,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 
basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -293,7 +293,7 @@ STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -319,7 +319,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -334,7 +334,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -348,14 +348,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -456,7 +456,7 @@ STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -475,7 +475,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -490,7 +490,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -503,14 +503,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -609,7 +609,7 @@ STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE 
GatherStats: false HashTable Sink Operator condition expressions: @@ -628,7 +628,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -643,7 +643,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -656,14 +656,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -762,7 +762,7 @@ STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -781,7 +781,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -796,7 +796,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -809,14 +809,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -915,7 +915,7 @@ STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -934,7 +934,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -949,7 +949,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -962,14 
+962,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1068,7 +1068,7 @@ STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -1087,7 +1087,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -1102,7 +1102,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1115,14 +1115,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1221,7 +1221,7 @@ STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -1240,7 +1240,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -1255,7 +1255,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1268,14 +1268,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1374,7 +1374,7 @@ 
STAGE PLANS: TableScan alias: r Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -1393,7 +1393,7 @@ STAGE PLANS: TableScan alias: l Statistics: - numRows: 0 dataSize: 4200 basicStatsState: PARTIAL colStatsState: NONE + numRows: 21 dataSize: 4200 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -1408,7 +1408,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1421,14 +1421,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 23 dataSize: 4620 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/ctas_hadoop20.q.out b/ql/src/test/results/clientpositive/ctas_hadoop20.q.out index 45d79a5..8c6e3c8 100644 --- a/ql/src/test/results/clientpositive/ctas_hadoop20.q.out +++ b/ql/src/test/results/clientpositive/ctas_hadoop20.q.out @@ -755,7 +755,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -765,7 +765,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -774,7 +774,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -832,10 +832,10 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 10 dataSize: 2000 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -868,7 +868,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 10 dataSize: 2000 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -904,17 +904,17 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 10 dataSize: 2000 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE 
+ numRows: 10 dataSize: 2000 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 10 dataSize: 2000 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out index 95a3a7a..7778d25 100644 --- a/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out +++ b/ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -37,14 +37,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator sort order: Map-reduce partition columns: expr: UDFToInteger(_col0) type: int Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -102,7 +102,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -111,14 +111,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/filter_join_breaktask.q.out b/ql/src/test/results/clientpositive/filter_join_breaktask.q.out index edf48b1..0bd54bb 100644 --- a/ql/src/test/results/clientpositive/filter_join_breaktask.q.out +++ b/ql/src/test/results/clientpositive/filter_join_breaktask.q.out @@ -51,7 +51,7 @@ STAGE PLANS: expr: key is not null type: boolean Statistics: - numRows: 12 dataSize: 96 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 109 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -61,7 +61,7 @@ STAGE PLANS: expr: key type: int Statistics: - numRows: 12 dataSize: 96 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 109 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -78,7 +78,7 @@ STAGE PLANS: expr: ((key is not null and value is not null) and (value <> '')) type: boolean Statistics: - numRows: 12 dataSize: 96 basicStatsState: COMPLETE colStatsState: NONE + numRows: 7 dataSize: 59 basicStatsState: COMPLETE 
colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -88,7 +88,7 @@ STAGE PLANS: expr: key type: int Statistics: - numRows: 12 dataSize: 96 basicStatsState: COMPLETE colStatsState: NONE + numRows: 7 dataSize: 59 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: value @@ -150,7 +150,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col6 Statistics: - numRows: 72 dataSize: 1152 basicStatsState: COMPLETE colStatsState: NONE + numRows: 14 dataSize: 119 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -184,7 +184,7 @@ STAGE PLANS: expr: _col6 type: string Statistics: - numRows: 72 dataSize: 1152 basicStatsState: COMPLETE colStatsState: NONE + numRows: 14 dataSize: 119 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: _col0 @@ -201,7 +201,7 @@ STAGE PLANS: expr: (value <> '') type: boolean Statistics: - numRows: 12 dataSize: 96 basicStatsState: COMPLETE colStatsState: NONE + numRows: 25 dataSize: 211 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: value @@ -211,7 +211,7 @@ STAGE PLANS: expr: value type: string Statistics: - numRows: 12 dataSize: 96 basicStatsState: COMPLETE colStatsState: NONE + numRows: 25 dataSize: 211 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: value @@ -294,7 +294,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col11 Statistics: - numRows: 432 dataSize: 10368 basicStatsState: COMPLETE colStatsState: NONE + numRows: 27 dataSize: 232 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -303,14 +303,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 432 dataSize: 10368 basicStatsState: COMPLETE colStatsState: NONE + numRows: 27 dataSize: 232 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 432 dataSize: 10368 basicStatsState: COMPLETE colStatsState: NONE + numRows: 27 dataSize: 232 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/groupby_map_ppr.q.out b/ql/src/test/results/clientpositive/groupby_map_ppr.q.out index 840fc63..973fad7 100644 --- a/ql/src/test/results/clientpositive/groupby_map_ppr.q.out +++ b/ql/src/test/results/clientpositive/groupby_map_ppr.q.out @@ -33,7 +33,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -43,7 +43,7 @@ STAGE PLANS: type: string outputColumnNames: key, value Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count(DISTINCT substr(value, 5)) @@ -57,7 +57,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -69,7 +69,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - 
numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col2 @@ -179,7 +179,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -190,14 +190,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out index f3c3695..059f1ea 100644 --- a/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out @@ -33,7 +33,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -43,7 +43,7 @@ STAGE PLANS: type: string outputColumnNames: key, value Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count(DISTINCT substr(value, 5)) @@ -61,7 +61,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -75,7 +75,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col3 @@ -191,7 +191,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -206,14 +206,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git 
a/ql/src/test/results/clientpositive/groupby_ppr.q.out b/ql/src/test/results/clientpositive/groupby_ppr.q.out index 0e74462..a43d749 100644 --- a/ql/src/test/results/clientpositive/groupby_ppr.q.out +++ b/ql/src/test/results/clientpositive/groupby_ppr.q.out @@ -33,7 +33,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -43,7 +43,7 @@ STAGE PLANS: type: string outputColumnNames: key, value Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: substr(key, 1, 1) @@ -55,7 +55,7 @@ STAGE PLANS: expr: substr(key, 1, 1) type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE tag: -1 Path -> Alias: #### A masked pattern was here #### @@ -160,7 +160,7 @@ STAGE PLANS: mode: complete outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -171,14 +171,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out index 32433f0..05ee01b 100644 --- a/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out @@ -33,7 +33,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -43,7 +43,7 @@ STAGE PLANS: type: string outputColumnNames: key, value Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: substr(key, 1, 1) @@ -57,7 +57,7 @@ STAGE PLANS: expr: substr(key, 1, 1) type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE tag: -1 Path -> Alias: #### A masked pattern was here #### @@ -164,7 +164,7 @@ STAGE PLANS: mode: complete outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -179,14 +179,14 @@ STAGE PLANS: type: int 
outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/groupby_sort_1.q.out b/ql/src/test/results/clientpositive/groupby_sort_1.q.out index daa6fd6..2ad3eba 100644 --- a/ql/src/test/results/clientpositive/groupby_sort_1.q.out +++ b/ql/src/test/results/clientpositive/groupby_sort_1.q.out @@ -4215,7 +4215,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col3 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -4224,14 +4224,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -4732,7 +4732,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -4747,14 +4747,14 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/groupby_sort_6.q.out b/ql/src/test/results/clientpositive/groupby_sort_6.q.out index 427aeac..417c241 100644 --- a/ql/src/test/results/clientpositive/groupby_sort_6.q.out +++ b/ql/src/test/results/clientpositive/groupby_sort_6.q.out @@ -487,7 +487,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 15 basicStatsState: PARTIAL colStatsState: NONE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -496,14 +496,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 15 basicStatsState: PARTIAL colStatsState: NONE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE File Output Operator compressed: false 
GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 15 basicStatsState: PARTIAL colStatsState: NONE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out b/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out index ce4596c..dbc0a13 100644 --- a/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out +++ b/ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out @@ -4616,7 +4616,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col3 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -4625,14 +4625,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -5214,7 +5214,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -5229,14 +5229,14 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 4 dataSize: 32 basicStatsState: COMPLETE colStatsState: NONE + numRows: 3 dataSize: 13 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/input23.q.out b/ql/src/test/results/clientpositive/input23.q.out index d253139..acd9159 100644 --- a/ql/src/test/results/clientpositive/input23.q.out +++ b/ql/src/test/results/clientpositive/input23.q.out @@ -19,12 +19,12 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator sort order: Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -120,7 +120,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7, _col8, _col9 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 31 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -141,17 +141,17 @@ STAGE PLANS: type: 
string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 31 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 5 dataSize: 1030 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 5 dataSize: 1030 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/input42.q.out b/ql/src/test/results/clientpositive/input42.q.out index abd74ac..c350830 100644 --- a/ql/src/test/results/clientpositive/input42.q.out +++ b/ql/src/test/results/clientpositive/input42.q.out @@ -19,7 +19,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -33,7 +33,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -42,7 +42,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -147,14 +147,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1209,7 +1209,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -1217,7 +1217,7 @@ STAGE PLANS: expr: (key < 200) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -1230,7 +1230,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1239,7 +1239,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ 
-1344,14 +1344,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1784,7 +1784,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -1792,7 +1792,7 @@ STAGE PLANS: expr: (rand(100) < 0.1) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -1805,7 +1805,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1814,7 +1814,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1919,14 +1919,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/input_part1.q.out b/ql/src/test/results/clientpositive/input_part1.q.out index ae00e59..93cab77 100644 --- a/ql/src/test/results/clientpositive/input_part1.q.out +++ b/ql/src/test/results/clientpositive/input_part1.q.out @@ -32,7 +32,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -40,7 +40,7 @@ STAGE PLANS: expr: (key < 100) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(key) @@ -53,14 +53,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: 
PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/input_part2.q.out b/ql/src/test/results/clientpositive/input_part2.q.out index 907de17..2f9472e 100644 --- a/ql/src/test/results/clientpositive/input_part2.q.out +++ b/ql/src/test/results/clientpositive/input_part2.q.out @@ -46,7 +46,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -54,7 +54,7 @@ STAGE PLANS: expr: ((key < 100) and (ds = '2008-04-08')) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(key) @@ -67,14 +67,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -100,7 +100,7 @@ STAGE PLANS: expr: ((key < 100) and (ds = '2008-04-09')) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(key) @@ -113,14 +113,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 2 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/input_part7.q.out b/ql/src/test/results/clientpositive/input_part7.q.out index 88826e6..619afb4 100644 --- a/ql/src/test/results/clientpositive/input_part7.q.out +++ b/ql/src/test/results/clientpositive/input_part7.q.out @@ -29,7 +29,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -37,7 +37,7 @@ STAGE PLANS: expr: (key < 100) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -50,10 +50,10 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: 
PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -66,7 +66,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -79,7 +79,7 @@ STAGE PLANS: type: string sort order: ++++ Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -94,7 +94,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -102,7 +102,7 @@ STAGE PLANS: expr: (key < 100) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -115,10 +115,10 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -131,7 +131,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -144,7 +144,7 @@ STAGE PLANS: type: string sort order: ++++ Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -249,14 +249,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/input_part9.q.out b/ql/src/test/results/clientpositive/input_part9.q.out index 7f513de..fa8e6db 100644 --- a/ql/src/test/results/clientpositive/input_part9.q.out +++ b/ql/src/test/results/clientpositive/input_part9.q.out @@ -19,7 +19,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator 
isSamplingPred: false @@ -27,7 +27,7 @@ STAGE PLANS: expr: key is not null type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -40,7 +40,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -49,7 +49,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -154,14 +154,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join17.q.out b/ql/src/test/results/clientpositive/join17.q.out index 543e01a..920707c 100644 --- a/ql/src/test/results/clientpositive/join17.q.out +++ b/ql/src/test/results/clientpositive/join17.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -38,7 +38,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -49,7 +49,7 @@ STAGE PLANS: TableScan alias: src2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -60,7 +60,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -125,7 +125,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 31 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -138,14 +138,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 31 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 31 dataSize: 6393 basicStatsState: COMPLETE 
colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join26.q.out b/ql/src/test/results/clientpositive/join26.q.out index dd8292f..a001f26 100644 --- a/ql/src/test/results/clientpositive/join26.q.out +++ b/ql/src/test/results/clientpositive/join26.q.out @@ -44,7 +44,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 2 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -61,7 +61,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -82,7 +82,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -100,7 +100,7 @@ STAGE PLANS: outputColumnNames: _col0, _col5, _col9 Position of Big Table: 2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -111,14 +111,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join32.q.out b/ql/src/test/results/clientpositive/join32.q.out index 4372696..43c186d 100644 --- a/ql/src/test/results/clientpositive/join32.q.out +++ b/ql/src/test/results/clientpositive/join32.q.out @@ -85,7 +85,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -100,7 +100,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -119,7 +119,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -155,14 +155,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: 
NONE colStatsState: NONE + numRows: 63 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join32_lessSize.q.out b/ql/src/test/results/clientpositive/join32_lessSize.q.out index 6add446..ba25187 100644 --- a/ql/src/test/results/clientpositive/join32_lessSize.q.out +++ b/ql/src/test/results/clientpositive/join32_lessSize.q.out @@ -47,7 +47,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -66,7 +66,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -242,7 +242,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -282,14 +282,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -571,7 +571,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -590,7 +590,7 @@ STAGE PLANS: TableScan alias: w Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -1398,7 +1398,7 @@ STAGE PLANS: expr: _col4 type: string Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: _col4 @@ -1407,7 +1407,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -1418,7 +1418,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: value @@ -1427,7 +1427,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -1438,7 +1438,7 @@ STAGE PLANS: expr: key type: string 
Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE tag: 2 value expressions: expr: value @@ -1567,7 +1567,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col4, _col9, _col13 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 138 dataSize: 14064 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col4 @@ -1578,14 +1578,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 138 dataSize: 14064 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 138 dataSize: 14064 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1776,7 +1776,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -1795,7 +1795,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -1817,7 +1817,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -1980,7 +1980,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -2020,14 +2020,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 69 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 69 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2319,7 +2319,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -2338,7 +2338,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -2360,7 +2360,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 
6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -2523,7 +2523,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -2563,14 +2563,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 69 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 69 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join33.q.out b/ql/src/test/results/clientpositive/join33.q.out index 4372696..43c186d 100644 --- a/ql/src/test/results/clientpositive/join33.q.out +++ b/ql/src/test/results/clientpositive/join33.q.out @@ -85,7 +85,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -100,7 +100,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -119,7 +119,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -155,14 +155,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 7032 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join34.q.out b/ql/src/test/results/clientpositive/join34.q.out index cf6e2bc..a15d350 100644 --- a/ql/src/test/results/clientpositive/join34.q.out +++ b/ql/src/test/results/clientpositive/join34.q.out @@ -48,7 +48,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -67,7 +67,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -75,7 +75,7 @@ STAGE PLANS: expr: (key < 20) type: boolean Statistics: - numRows: 0 dataSize: 
2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -84,10 +84,10 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 3606 basicStatsState: COMPLETE colStatsState: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -110,14 +110,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 19 dataSize: 3966 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 19 dataSize: 3966 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -141,7 +141,7 @@ STAGE PLANS: TableScan alias: x1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -149,7 +149,7 @@ STAGE PLANS: expr: (key > 100) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -158,10 +158,10 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 3606 basicStatsState: COMPLETE colStatsState: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -184,14 +184,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 19 dataSize: 3966 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 19 dataSize: 3966 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join35.q.out b/ql/src/test/results/clientpositive/join35.q.out index 9dd490a..7a572d4 100644 --- a/ql/src/test/results/clientpositive/join35.q.out +++ b/ql/src/test/results/clientpositive/join35.q.out @@ -50,7 +50,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -58,14 +58,14 @@ STAGE PLANS: expr: (key < 20) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 1903 basicStatsState: COMPLETE 
colStatsState: NONE Select Operator expressions: expr: key type: string outputColumnNames: key Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 1903 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count(1) @@ -76,7 +76,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 1903 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -86,7 +86,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 1903 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col1 @@ -150,7 +150,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -159,7 +159,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -606,7 +606,7 @@ STAGE PLANS: GatherStats: false Union Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 1802 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -616,7 +616,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 1802 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: _col1 @@ -626,7 +626,7 @@ STAGE PLANS: GatherStats: false Union Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 1802 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -636,7 +636,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 1802 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: _col1 @@ -645,7 +645,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -656,7 +656,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 1 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -762,7 +762,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 19 dataSize: 1982 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col2 @@ -773,14 +773,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 19 dataSize: 1982 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false 
GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 19 dataSize: 1982 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -808,7 +808,7 @@ STAGE PLANS: TableScan alias: x1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -816,14 +816,14 @@ STAGE PLANS: expr: (key > 100) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 1903 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key type: string outputColumnNames: key Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 1903 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count(1) @@ -834,7 +834,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 1903 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -844,7 +844,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 1903 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col1 @@ -908,7 +908,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -917,7 +917,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 diff --git a/ql/src/test/results/clientpositive/join9.q.out b/ql/src/test/results/clientpositive/join9.q.out index 9afa548..f8f822b 100644 --- a/ql/src/test/results/clientpositive/join9.q.out +++ b/ql/src/test/results/clientpositive/join9.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -38,7 +38,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -47,7 +47,7 @@ STAGE PLANS: TableScan alias: src2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -58,7 +58,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: value @@ -164,7 +164,7 @@ STAGE PLANS: 
handleSkewJoin: false outputColumnNames: _col0, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(_col0) @@ -173,14 +173,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join_filters_overlap.q.out b/ql/src/test/results/clientpositive/join_filters_overlap.q.out index 22910f2..95e73b1 100644 --- a/ql/src/test/results/clientpositive/join_filters_overlap.q.out +++ b/ql/src/test/results/clientpositive/join_filters_overlap.q.out @@ -172,7 +172,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -189,14 +189,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -403,7 +403,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -420,14 +420,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -634,7 +634,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -651,14 +651,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 
basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 39 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -891,7 +891,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9, _col12, _col13 Statistics: - numRows: 1 dataSize: 24 basicStatsState: COMPLETE colStatsState: NONE + numRows: 9 dataSize: 59 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -912,14 +912,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: - numRows: 1 dataSize: 24 basicStatsState: COMPLETE colStatsState: NONE + numRows: 9 dataSize: 59 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 1 dataSize: 24 basicStatsState: COMPLETE colStatsState: NONE + numRows: 9 dataSize: 59 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1149,7 +1149,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5, _col8, _col9, _col12, _col13 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 9 dataSize: 59 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1170,14 +1170,14 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 9 dataSize: 59 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 9 dataSize: 59 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/join_map_ppr.q.out b/ql/src/test/results/clientpositive/join_map_ppr.q.out index f211efe..d5e18fb 100644 --- a/ql/src/test/results/clientpositive/join_map_ppr.q.out +++ b/ql/src/test/results/clientpositive/join_map_ppr.q.out @@ -46,7 +46,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 216 basicStatsState: PARTIAL colStatsState: NONE + numRows: 2 dataSize: 216 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -63,7 +63,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -84,7 +84,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -102,7 +102,7 @@ STAGE PLANS: outputColumnNames: _col0, _col5, _col9 Position of Big Table: 2 Statistics: - numRows: 0 
dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -113,14 +113,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -641,7 +641,7 @@ STAGE PLANS: TableScan alias: z Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -659,7 +659,7 @@ STAGE PLANS: outputColumnNames: _col0, _col5, _col9 Position of Big Table: 2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1100 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -670,14 +670,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1100 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1100 dataSize: 12786 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out index 8ddc3c4..17d6bde 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_10.q.out @@ -70,7 +70,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -80,7 +80,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 @@ -88,7 +88,7 @@ STAGE PLANS: NumFilesPerFileSink: 1 Static Partition Specification: ds=2008-04-08/hr=11/ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat diff --git a/ql/src/test/results/clientpositive/load_dyn_part8.q.out b/ql/src/test/results/clientpositive/load_dyn_part8.q.out index 09d0ac8..ce6d402 100644 --- a/ql/src/test/results/clientpositive/load_dyn_part8.q.out +++ b/ql/src/test/results/clientpositive/load_dyn_part8.q.out @@ -55,7 +55,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 23248 
basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -63,7 +63,7 @@ STAGE PLANS: expr: (ds <= '2008-04-08') type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -76,14 +76,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -110,7 +110,7 @@ STAGE PLANS: expr: (ds > '2008-04-08') type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -121,7 +121,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 2 @@ -129,7 +129,7 @@ STAGE PLANS: NumFilesPerFileSink: 1 Static Partition Specification: ds=2008-12-31/ Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/louter_join_ppr.q.out b/ql/src/test/results/clientpositive/louter_join_ppr.q.out index de71f54..3d55026 100644 --- a/ql/src/test/results/clientpositive/louter_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/louter_join_ppr.q.out @@ -31,7 +31,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -39,7 +39,7 @@ STAGE PLANS: expr: ((key > 10) and (key < 20)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -49,7 +49,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -60,7 +60,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -68,7 +68,7 @@ STAGE PLANS: expr: ((key > 10) and (key < 20)) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 6 
dataSize: 1202 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -78,7 +78,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 6 dataSize: 1202 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -229,7 +229,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 1322 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: @@ -349,7 +349,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -357,7 +357,7 @@ STAGE PLANS: expr: ((key > 10) and (key < 20)) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -367,7 +367,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -380,7 +380,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -388,7 +388,7 @@ STAGE PLANS: expr: ((key > 10) and (key < 20)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -398,7 +398,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -640,14 +640,14 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col6, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 13 dataSize: 2644 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: ((_col6 > 15) and (_col6 < 25)) type: boolean Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 203 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -660,14 +660,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 203 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 203 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -764,7 +764,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 
basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -772,7 +772,7 @@ STAGE PLANS: expr: ((key > 10) and (key < 20)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -782,7 +782,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -793,7 +793,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -801,7 +801,7 @@ STAGE PLANS: expr: ((key > 10) and (key < 20)) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -811,7 +811,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -1050,7 +1050,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5, _col6 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 13 dataSize: 2644 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: @@ -1174,7 +1174,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -1182,7 +1182,7 @@ STAGE PLANS: expr: ((key > 10) and (key < 20)) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 6 dataSize: 1202 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -1192,7 +1192,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 6 dataSize: 1202 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -1203,7 +1203,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -1211,7 +1211,7 @@ STAGE PLANS: expr: ((key > 10) and (key < 20)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -1221,7 +1221,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -1372,7 +1372,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: 
_col0, _col1, _col6, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 1322 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: diff --git a/ql/src/test/results/clientpositive/metadataonly1.q.out b/ql/src/test/results/clientpositive/metadataonly1.q.out index 4238be9..fe637a6 100644 --- a/ql/src/test/results/clientpositive/metadataonly1.q.out +++ b/ql/src/test/results/clientpositive/metadataonly1.q.out @@ -849,10 +849,10 @@ STAGE PLANS: 1 handleSkewJoin: false Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 92 basicStatsState: COMPLETE colStatsState: NONE Select Operator Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 92 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/outer_join_ppr.q.out b/ql/src/test/results/clientpositive/outer_join_ppr.q.out index 99dcba0..d9edecb 100644 --- a/ql/src/test/results/clientpositive/outer_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/outer_join_ppr.q.out @@ -31,7 +31,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -42,7 +42,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -53,7 +53,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -64,7 +64,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -308,14 +308,14 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 25572 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: ((((_col4 > 15) and (_col4 < 25)) and (_col0 > 10)) and (_col0 < 20)) type: boolean Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 201 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -328,14 +328,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 201 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 201 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -432,7 +432,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 
basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -443,7 +443,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -454,7 +454,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -465,7 +465,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -704,7 +704,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5, _col6 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 127 dataSize: 25572 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: diff --git a/ql/src/test/results/clientpositive/pcr.q.out b/ql/src/test/results/clientpositive/pcr.q.out index b0cd574..9ac856c 100644 --- a/ql/src/test/results/clientpositive/pcr.q.out +++ b/ql/src/test/results/clientpositive/pcr.q.out @@ -84,7 +84,7 @@ STAGE PLANS: expr: (key < 5) type: boolean Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -95,7 +95,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -104,7 +104,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -205,14 +205,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -295,7 +295,7 @@ STAGE PLANS: expr: ((ds <= '2000-04-09') or (key < 5)) type: boolean Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -304,14 +304,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 type: int sort order: + Statistics: - numRows: 30 
dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -452,14 +452,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -579,7 +579,7 @@ STAGE PLANS: expr: ((key < 5) and (value <> 'val_2')) type: boolean Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -590,7 +590,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -599,7 +599,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -700,14 +700,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 13 dataSize: 104 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -792,7 +792,7 @@ STAGE PLANS: expr: (((ds < '2000-04-09') and (key < 5)) or ((ds > '2000-04-09') and (value = 'val_5'))) type: boolean Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 80 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -803,7 +803,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 80 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -812,7 +812,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 80 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -913,14 +913,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 80 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 
80 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1007,7 +1007,7 @@ STAGE PLANS: expr: (((ds < '2000-04-10') and (key < 5)) or ((ds > '2000-04-08') and (value = 'val_5'))) type: boolean Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 16 dataSize: 128 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -1018,7 +1018,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 16 dataSize: 128 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1027,7 +1027,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 16 dataSize: 128 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1170,14 +1170,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 16 dataSize: 128 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 16 dataSize: 128 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1274,7 +1274,7 @@ STAGE PLANS: expr: (((ds < '2000-04-10') or (key < 5)) and ((ds > '2000-04-08') or (value = 'val_5'))) type: boolean Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 33 dataSize: 264 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -1285,7 +1285,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 33 dataSize: 264 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1294,7 +1294,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 33 dataSize: 264 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1437,14 +1437,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 33 dataSize: 264 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 30 dataSize: 240 basicStatsState: COMPLETE colStatsState: NONE + numRows: 33 dataSize: 264 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2270,7 +2270,7 @@ STAGE PLANS: expr: (((ds = '2000-04-08') and (key = 1)) or ((ds = '2000-04-09') and (key = 2))) type: boolean Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -2281,7 +2281,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, 
_col1, _col2 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -2292,7 +2292,7 @@ STAGE PLANS: type: string sort order: +++ Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -2393,14 +2393,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2568,7 +2568,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7 Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -2585,7 +2585,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -2616,7 +2616,7 @@ STAGE PLANS: type: int sort order: + Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -2660,14 +2660,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2912,7 +2912,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7 Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -2929,7 +2929,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -2960,7 +2960,7 @@ STAGE PLANS: type: int sort order: + Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -3004,14 +3004,14 @@ STAGE PLANS: Reduce 
Operator Tree: Extract Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 200 dataSize: 3200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 22 dataSize: 176 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -3140,7 +3140,7 @@ STAGE PLANS: expr: (((ds > '2000-04-08') and (ds < '2000-04-11')) or (key = 2)) type: boolean Statistics: - numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE + numRows: 48 dataSize: 384 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -3151,7 +3151,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE + numRows: 48 dataSize: 384 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -3162,7 +3162,7 @@ STAGE PLANS: type: string sort order: +++ Statistics: - numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE + numRows: 48 dataSize: 384 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -3347,14 +3347,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE + numRows: 48 dataSize: 384 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE + numRows: 48 dataSize: 384 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -4985,7 +4985,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -4995,14 +4995,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 type: string sort order: + Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -5060,17 +5060,17 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 10 dataSize: 2000 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 10 dataSize: 2000 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: 
org.apache.hadoop.mapred.TextInputFormat @@ -5151,7 +5151,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -5159,7 +5159,7 @@ STAGE PLANS: expr: (key = 11) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -5172,7 +5172,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -5183,7 +5183,7 @@ STAGE PLANS: type: string sort order: +++ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -5288,14 +5288,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -5382,7 +5382,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -5390,7 +5390,7 @@ STAGE PLANS: expr: (key = 11) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -5403,7 +5403,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -5414,7 +5414,7 @@ STAGE PLANS: type: string sort order: +++ Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -5519,14 +5519,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/ppd_join_filter.q.out 
b/ql/src/test/results/clientpositive/ppd_join_filter.q.out index ad4fcd6..22ae5d3 100644 --- a/ql/src/test/results/clientpositive/ppd_join_filter.q.out +++ b/ql/src/test/results/clientpositive/ppd_join_filter.q.out @@ -40,7 +40,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -48,7 +48,7 @@ STAGE PLANS: type: string outputColumnNames: key Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: min(key) @@ -59,7 +59,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -69,7 +69,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col1 @@ -133,7 +133,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 2906 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -146,14 +146,14 @@ STAGE PLANS: type: double outputColumnNames: _col0, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 2906 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: (_col2 < 5.0) type: boolean Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -187,7 +187,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: _col3 @@ -198,7 +198,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -209,7 +209,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -293,7 +293,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col7, _col8 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -304,14 +304,14 @@ STAGE PLANS: type: double outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 
Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -409,7 +409,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -417,7 +417,7 @@ STAGE PLANS: type: string outputColumnNames: key Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: min(key) @@ -428,7 +428,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -438,7 +438,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col1 @@ -502,7 +502,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 2906 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -515,14 +515,14 @@ STAGE PLANS: type: double outputColumnNames: _col0, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 2906 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: (_col2 < 5.0) type: boolean Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -556,7 +556,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: _col3 @@ -567,7 +567,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -578,7 +578,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -662,7 +662,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col7, _col8 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -673,14 +673,14 @@ STAGE PLANS: type: double outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked 
pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -778,7 +778,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -786,7 +786,7 @@ STAGE PLANS: type: string outputColumnNames: key Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: min(key) @@ -797,7 +797,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -807,7 +807,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col1 @@ -871,7 +871,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 2906 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -884,14 +884,14 @@ STAGE PLANS: type: double outputColumnNames: _col0, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 2906 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: (_col2 < 5.0) type: boolean Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -925,7 +925,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: _col3 @@ -936,7 +936,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -947,7 +947,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -1031,7 +1031,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col7, _col8 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1042,14 +1042,14 @@ STAGE PLANS: type: double outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator 
compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1147,7 +1147,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -1155,7 +1155,7 @@ STAGE PLANS: type: string outputColumnNames: key Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: min(key) @@ -1166,7 +1166,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1176,7 +1176,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col1 @@ -1240,7 +1240,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 2906 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1253,14 +1253,14 @@ STAGE PLANS: type: double outputColumnNames: _col0, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 2906 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: (_col2 < 5.0) type: boolean Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -1294,7 +1294,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 1453 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 901 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: _col3 @@ -1305,7 +1305,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -1316,7 +1316,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -1400,7 +1400,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col7, _col8 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1411,14 +1411,14 @@ STAGE PLANS: type: double outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 
basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/ppd_union_view.q.out b/ql/src/test/results/clientpositive/ppd_union_view.q.out index fb93b80..1170429 100644 --- a/ql/src/test/results/clientpositive/ppd_union_view.q.out +++ b/ql/src/test/results/clientpositive/ppd_union_view.q.out @@ -347,7 +347,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col1, _col2, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 15 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col5 @@ -358,7 +358,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 15 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -385,7 +385,7 @@ STAGE PLANS: GatherStats: false Union Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 15 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -396,14 +396,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 15 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 15 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -445,7 +445,7 @@ STAGE PLANS: numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE Union Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 15 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -456,14 +456,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 15 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 15 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/ppd_vc.q.out b/ql/src/test/results/clientpositive/ppd_vc.q.out index 24c30af..e0476e0 100644 --- a/ql/src/test/results/clientpositive/ppd_vc.q.out +++ b/ql/src/test/results/clientpositive/ppd_vc.q.out @@ -23,7 +23,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ 
-31,7 +31,7 @@ STAGE PLANS: expr: (BLOCK__OFFSET__INSIDE__FILE < 100) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -44,14 +44,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -327,7 +327,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Reduce Output Operator key expressions: @@ -338,13 +338,13 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 0 b:srcpart TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -352,7 +352,7 @@ STAGE PLANS: expr: ((BLOCK__OFFSET__INSIDE__FILE < 100) and (BLOCK__OFFSET__INSIDE__FILE < 50)) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -367,7 +367,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -377,7 +377,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: _col0 @@ -620,7 +620,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col4, _col5, _col6, _col7, _col8 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col4 @@ -635,7 +635,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1, _col2, _col3, _col4 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -670,7 +670,7 @@ STAGE PLANS: type: bigint sort order: +++ Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -712,14 +712,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE 
colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 63 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out b/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out index 7dbed37..7d877f7 100644 --- a/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out +++ b/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out @@ -31,7 +31,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -39,7 +39,7 @@ STAGE PLANS: expr: (CASE WHEN (((value like 'aaa%') or (value like 'vvv%'))) THEN (1) ELSE (0) END > 0) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(key) @@ -48,14 +48,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -224,7 +224,7 @@ STAGE PLANS: TableScan alias: srcpart Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -232,7 +232,7 @@ STAGE PLANS: expr: ((ds = '2008-04-08') and (CASE WHEN (((value like 'aaa%') or (value like 'vvv%'))) THEN (1) ELSE (0) END > 0)) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(key) @@ -241,14 +241,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7615 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/push_or.q.out b/ql/src/test/results/clientpositive/push_or.q.out index 1b9be2e..9f996e2 100644 --- a/ql/src/test/results/clientpositive/push_or.q.out +++ b/ql/src/test/results/clientpositive/push_or.q.out @@ -60,7 +60,7 @@ STAGE 
PLANS: expr: ((ds = '2000-04-09') or (key = 5)) type: boolean Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -71,7 +71,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -80,7 +80,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -181,14 +181,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 20 dataSize: 160 basicStatsState: COMPLETE colStatsState: NONE + numRows: 40 dataSize: 320 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out index d55a1ff..eb7eed3 100644 --- a/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out +++ b/ql/src/test/results/clientpositive/rand_partitionpruner1.q.out @@ -19,7 +19,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -27,7 +27,7 @@ STAGE PLANS: expr: (rand(1) < 0.1) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -36,14 +36,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out index 6fa3f4b..f854c9f 100644 --- a/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out +++ b/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out @@ -36,7 +36,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -44,7 +44,7 @@ STAGE PLANS: expr: (rand(1) < 0.1) type: boolean Statistics: - numRows: 0 dataSize: 5812 
basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -57,14 +57,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out index c9a0e36..fca59d0 100644 --- a/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out +++ b/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out @@ -21,7 +21,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -29,7 +29,7 @@ STAGE PLANS: expr: ((rand(1) < 0.1) and (not ((key > 50) or (key < 10)))) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -42,14 +42,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -154,7 +154,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -162,7 +162,7 @@ STAGE PLANS: expr: (not ((key > 50) or (key < 10))) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 11 dataSize: 2204 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -175,14 +175,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 11 dataSize: 2204 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 11 dataSize: 2204 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/reduce_deduplicate.q.out 
b/ql/src/test/results/clientpositive/reduce_deduplicate.q.out index fcbe090..31cf99d 100644 --- a/ql/src/test/results/clientpositive/reduce_deduplicate.q.out +++ b/ql/src/test/results/clientpositive/reduce_deduplicate.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -37,7 +37,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -47,7 +47,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -105,7 +105,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -114,14 +114,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/regexp_extract.q.out b/ql/src/test/results/clientpositive/regexp_extract.q.out index c65890a..ae9595f 100644 --- a/ql/src/test/results/clientpositive/regexp_extract.q.out +++ b/ql/src/test/results/clientpositive/regexp_extract.q.out @@ -31,7 +31,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -45,7 +45,7 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Transform Operator command: cat output info: @@ -60,14 +60,14 @@ STAGE PLANS: serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: (_col0 < 100) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -77,7 +77,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE 
colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -135,7 +135,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -144,14 +144,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -310,7 +310,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -324,7 +324,7 @@ STAGE PLANS: type: int outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Transform Operator command: cat output info: @@ -339,14 +339,14 @@ STAGE PLANS: serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: (_col0 < 100) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -356,7 +356,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -414,7 +414,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -423,14 +423,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 9 dataSize: 1803 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/router_join_ppr.q.out b/ql/src/test/results/clientpositive/router_join_ppr.q.out index e623528..fc9c727 100644 --- a/ql/src/test/results/clientpositive/router_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/router_join_ppr.q.out @@ -31,7 +31,7 @@ STAGE PLANS: 
TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -39,7 +39,7 @@ STAGE PLANS: expr: ((key > 15) and (key < 25)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -49,7 +49,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -60,7 +60,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -68,7 +68,7 @@ STAGE PLANS: expr: ((key > 15) and (key < 25)) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -78,7 +78,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -322,14 +322,14 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 13 dataSize: 2644 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: ((_col0 > 10) and (_col0 < 20)) type: boolean Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 203 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -342,14 +342,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 203 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 1 dataSize: 203 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -446,7 +446,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -454,7 +454,7 @@ STAGE PLANS: expr: ((key > 15) and (key < 25)) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 6 dataSize: 1202 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -464,7 +464,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 6 dataSize: 1202 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: 
key @@ -475,7 +475,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -483,7 +483,7 @@ STAGE PLANS: expr: ((key > 15) and (key < 25)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -493,7 +493,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -644,7 +644,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col6, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 1322 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: @@ -764,7 +764,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -772,7 +772,7 @@ STAGE PLANS: expr: ((key > 15) and (key < 25)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -782,7 +782,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -793,7 +793,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -801,7 +801,7 @@ STAGE PLANS: expr: ((key > 15) and (key < 25)) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 6 dataSize: 1202 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -811,7 +811,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 6 dataSize: 1202 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -962,7 +962,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col4, _col5 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 6 dataSize: 1322 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: @@ -1082,7 +1082,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -1090,7 +1090,7 @@ STAGE PLANS: expr: ((key > 15) and (key < 25)) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: 
NONE Reduce Output Operator key expressions: expr: key @@ -1100,7 +1100,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 12 dataSize: 2404 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -1113,7 +1113,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -1121,7 +1121,7 @@ STAGE PLANS: expr: ((key > 15) and (key < 25)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -1131,7 +1131,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 3 dataSize: 601 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -1368,7 +1368,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col6, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 13 dataSize: 2644 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: diff --git a/ql/src/test/results/clientpositive/sample1.q.out b/ql/src/test/results/clientpositive/sample1.q.out index 7098dc6..088fcd1 100644 --- a/ql/src/test/results/clientpositive/sample1.q.out +++ b/ql/src/test/results/clientpositive/sample1.q.out @@ -36,7 +36,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -44,7 +44,7 @@ STAGE PLANS: expr: (((hash(rand()) & 2147483647) % 1) = 0) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 14 dataSize: 2805 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: UDFToInteger(key) @@ -57,14 +57,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 14 dataSize: 2805 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 14 dataSize: 2805 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/sample2.q.out b/ql/src/test/results/clientpositive/sample2.q.out index afff95e..672ee62 100644 --- a/ql/src/test/results/clientpositive/sample2.q.out +++ b/ql/src/test/results/clientpositive/sample2.q.out @@ -36,7 +36,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -44,7 +44,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: 
PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -53,14 +53,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/sample4.q.out b/ql/src/test/results/clientpositive/sample4.q.out index 4a4e7c1..da697a2 100644 --- a/ql/src/test/results/clientpositive/sample4.q.out +++ b/ql/src/test/results/clientpositive/sample4.q.out @@ -36,7 +36,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -44,7 +44,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -53,14 +53,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/sample5.q.out b/ql/src/test/results/clientpositive/sample5.q.out index 95be999..9e7ab23 100644 --- a/ql/src/test/results/clientpositive/sample5.q.out +++ b/ql/src/test/results/clientpositive/sample5.q.out @@ -34,7 +34,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -42,7 +42,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 5) = 0) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -51,14 +51,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: 
org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/sample6.q.out b/ql/src/test/results/clientpositive/sample6.q.out index a3dcef7..206bb68 100644 --- a/ql/src/test/results/clientpositive/sample6.q.out +++ b/ql/src/test/results/clientpositive/sample6.q.out @@ -34,7 +34,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -42,7 +42,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 4) = 0) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -51,14 +51,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -598,7 +598,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -606,7 +606,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 4) = 3) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -615,7 +615,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -624,7 +624,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -684,14 +684,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -987,7 +987,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -995,7 +995,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean Statistics: - numRows: 0 
dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -1004,7 +1004,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1013,7 +1013,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1073,14 +1073,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1630,7 +1630,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -1638,7 +1638,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 3) = 0) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -1647,7 +1647,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1656,7 +1656,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1716,14 +1716,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2116,7 +2116,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -2124,7 +2124,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 3) = 1) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE 
colStatsState: NONE Select Operator expressions: expr: key @@ -2133,7 +2133,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -2142,7 +2142,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -2202,14 +2202,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2588,7 +2588,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -2596,7 +2596,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -2605,7 +2605,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -2614,7 +2614,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -2719,14 +2719,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -2905,7 +2905,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -2913,7 +2913,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 4) = 1) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -2922,7 +2922,7 @@ STAGE PLANS: type: string 
outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -2931,7 +2931,7 @@ STAGE PLANS: type: string sort order: ++ Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -2991,14 +2991,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 27 dataSize: 2853 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/sample7.q.out b/ql/src/test/results/clientpositive/sample7.q.out index d830ef8..2829e6a 100644 --- a/ql/src/test/results/clientpositive/sample7.q.out +++ b/ql/src/test/results/clientpositive/sample7.q.out @@ -36,7 +36,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -44,7 +44,7 @@ STAGE PLANS: expr: ((((hash(key) & 2147483647) % 4) = 0) and (key > 100)) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 1881 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -53,14 +53,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 1881 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 18 dataSize: 1881 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/sample8.q.out b/ql/src/test/results/clientpositive/sample8.q.out index 5b25c51..901ac40 100644 --- a/ql/src/test/results/clientpositive/sample8.q.out +++ b/ql/src/test/results/clientpositive/sample8.q.out @@ -32,7 +32,7 @@ STAGE PLANS: TableScan alias: s Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -40,7 +40,7 @@ STAGE PLANS: expr: ((((hash(key) & 2147483647) % 10) = 0) and (((hash(key) & 2147483647) % 1) = 0)) type: boolean Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + numRows: 7 dataSize: 1402 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -54,7 +54,7 @@ STAGE PLANS: expr: value type: string Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: NONE + 
numRows: 7 dataSize: 1402 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -69,7 +69,7 @@ STAGE PLANS: TableScan alias: t Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -77,7 +77,7 @@ STAGE PLANS: expr: ((((hash(key) & 2147483647) % 1) = 0) and (((hash(key) & 2147483647) % 10) = 0)) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -91,7 +91,7 @@ STAGE PLANS: expr: value type: string Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: key @@ -285,7 +285,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col2, _col3, _col6, _col7 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 31 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: diff --git a/ql/src/test/results/clientpositive/sample9.q.out b/ql/src/test/results/clientpositive/sample9.q.out index 000fd79..dc75ff1 100644 --- a/ql/src/test/results/clientpositive/sample9.q.out +++ b/ql/src/test/results/clientpositive/sample9.q.out @@ -21,7 +21,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 11603 basicStatsState: PARTIAL colStatsState: NONE + numRows: 111 dataSize: 11603 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: true @@ -29,7 +29,7 @@ STAGE PLANS: expr: (((hash(key) & 2147483647) % 2) = 0) type: boolean Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -38,14 +38,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5801 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5749 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/serde_user_properties.q.out b/ql/src/test/results/clientpositive/serde_user_properties.q.out index 3588e6a..90b2865 100644 --- a/ql/src/test/results/clientpositive/serde_user_properties.q.out +++ b/ql/src/test/results/clientpositive/serde_user_properties.q.out @@ -21,7 +21,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -29,14 +29,14 @@ STAGE PLANS: type: string outputColumnNames: _col0 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false 
GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -124,7 +124,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -132,14 +132,14 @@ STAGE PLANS: type: string outputColumnNames: _col0 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -227,7 +227,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -235,14 +235,14 @@ STAGE PLANS: type: string outputColumnNames: _col0 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -334,7 +334,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -342,14 +342,14 @@ STAGE PLANS: type: string outputColumnNames: _col0 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -439,7 +439,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -447,14 +447,14 @@ STAGE PLANS: type: string outputColumnNames: _col0 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 
Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -548,7 +548,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -556,14 +556,14 @@ STAGE PLANS: type: string outputColumnNames: _col0 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -653,7 +653,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -661,14 +661,14 @@ STAGE PLANS: type: string outputColumnNames: _col0 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_13.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_13.q.out index d8342c1..bab560b 100644 --- a/ql/src/test/results/clientpositive/smb_mapjoin_13.q.out +++ b/ql/src/test/results/clientpositive/smb_mapjoin_13.q.out @@ -308,7 +308,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col4, _col5 Position of Big Table: 0 Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -321,14 +321,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 type: int sort order: + Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -396,17 +396,17 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 10 dataSize: 200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 100 basicStatsState: COMPLETE colStatsState: NONE File Output 
Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 10 dataSize: 200 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 100 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_15.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_15.q.out index fb1688c..4ba4d6a 100644 --- a/ql/src/test/results/clientpositive/smb_mapjoin_15.q.out +++ b/ql/src/test/results/clientpositive/smb_mapjoin_15.q.out @@ -741,7 +741,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col5, _col6, _col7 Position of Big Table: 0 Statistics: - numRows: 125000 dataSize: 3500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 7939 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -759,14 +759,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 Statistics: - numRows: 125000 dataSize: 3500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 7939 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 type: int sort order: + Statistics: - numRows: 125000 dataSize: 3500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 7939 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -838,17 +838,17 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 125000 dataSize: 3500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 7939 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 10 dataSize: 280 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 140 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 10 dataSize: 280 basicStatsState: COMPLETE colStatsState: NONE + numRows: 10 dataSize: 140 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out b/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out index 8540fd8..096469c 100644 --- a/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out +++ b/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out @@ -168,11 +168,11 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator Statistics: - numRows: 125000 dataSize: 2500000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 550 dataSize: 5843 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out b/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out index ead54d8..3e88520 100644 --- a/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out +++ b/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out @@ -266,10 +266,10 @@ STAGE PLANS: 1 [Column[key]] Position of Big Table: 0 Statistics: - numRows: 500000 
dataSize: 10000000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 1100 dataSize: 11686 basicStatsState: COMPLETE colStatsState: NONE Select Operator Statistics: - numRows: 500000 dataSize: 10000000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 1100 dataSize: 11686 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count() diff --git a/ql/src/test/results/clientpositive/stats0.q.out b/ql/src/test/results/clientpositive/stats0.q.out index 7958a7d..d1111ef 100644 --- a/ql/src/test/results/clientpositive/stats0.q.out +++ b/ql/src/test/results/clientpositive/stats0.q.out @@ -27,7 +27,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -37,14 +37,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1374,7 +1374,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -1384,14 +1384,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/stats11.q.out b/ql/src/test/results/clientpositive/stats11.q.out index 34e2b09..4ff4088 100644 --- a/ql/src/test/results/clientpositive/stats11.q.out +++ b/ql/src/test/results/clientpositive/stats11.q.out @@ -349,7 +349,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -375,7 +375,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -390,7 +390,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 0 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -402,14 +402,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, 
_col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -850,7 +850,7 @@ STAGE PLANS: TableScan alias: a Statistics: - numRows: 0 dataSize: 2750 basicStatsState: PARTIAL colStatsState: NONE + numRows: 26 dataSize: 2750 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false HashTable Sink Operator condition expressions: @@ -876,7 +876,7 @@ STAGE PLANS: TableScan alias: b Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 55 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Map Join Operator condition map: @@ -891,7 +891,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5 Position of Big Table: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE BucketMapJoin: true Select Operator expressions: @@ -903,14 +903,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: NONE + numRows: 60 dataSize: 6393 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/transform_ppr1.q.out b/ql/src/test/results/clientpositive/transform_ppr1.q.out index a007605..ab9471b 100644 --- a/ql/src/test/results/clientpositive/transform_ppr1.q.out +++ b/ql/src/test/results/clientpositive/transform_ppr1.q.out @@ -31,7 +31,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -43,7 +43,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE Transform Operator command: cat output info: @@ -57,14 +57,14 @@ STAGE PLANS: serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Statistics: - numRows: 0 dataSize: 23248 basicStatsState: PARTIAL colStatsState: NONE + numRows: 116 dataSize: 23248 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: ((_col1 < 100) and (_col0 = '2008-04-08')) type: boolean Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col1 @@ -74,7 +74,7 @@ STAGE PLANS: expr: _col1 type: string Statistics: - numRows: 0 dataSize: 
11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -263,7 +263,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col1 @@ -272,14 +272,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/transform_ppr2.q.out b/ql/src/test/results/clientpositive/transform_ppr2.q.out index bc0be29..40c1a11 100644 --- a/ql/src/test/results/clientpositive/transform_ppr2.q.out +++ b/ql/src/test/results/clientpositive/transform_ppr2.q.out @@ -33,7 +33,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -45,7 +45,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Transform Operator command: cat output info: @@ -59,14 +59,14 @@ STAGE PLANS: serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE Filter Operator isSamplingPred: false predicate: expr: (_col1 < 100) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col1 @@ -76,7 +76,7 @@ STAGE PLANS: expr: _col1 type: string Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -179,7 +179,7 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col1 @@ -188,14 +188,14 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was 
here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/udf_explode.q.out b/ql/src/test/results/clientpositive/udf_explode.q.out index 9ca585f..a4a1d9b 100644 --- a/ql/src/test/results/clientpositive/udf_explode.q.out +++ b/ql/src/test/results/clientpositive/udf_explode.q.out @@ -240,7 +240,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE Select Operator expressions: expr: _col0 @@ -249,14 +249,14 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -553,7 +553,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE Select Operator expressions: expr: _col0 @@ -564,14 +564,14 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/udf_reflect.q.out b/ql/src/test/results/clientpositive/udf_reflect.q.out index 5d86d2d..459cd4d 100644 --- a/ql/src/test/results/clientpositive/udf_reflect.q.out +++ b/ql/src/test/results/clientpositive/udf_reflect.q.out @@ -48,7 +48,7 @@ STAGE PLANS: alias: src Row Limit Per Split: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -70,7 +70,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE ListSink diff --git a/ql/src/test/results/clientpositive/udf_reflect2.q.out b/ql/src/test/results/clientpositive/udf_reflect2.q.out index 36505f9..6494a60 100644 --- a/ql/src/test/results/clientpositive/udf_reflect2.q.out +++ b/ql/src/test/results/clientpositive/udf_reflect2.q.out @@ -93,7 +93,7 @@ STAGE PLANS: TableScan alias: src Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Select Operator expressions: @@ -105,7 +105,7 @@ STAGE PLANS: type: 
timestamp outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -172,17 +172,17 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25, _col26, _col27, _col28, _col29, _col30 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 29 dataSize: 5812 basicStatsState: COMPLETE colStatsState: NONE Limit Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 5 dataSize: 1000 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 5 dataSize: 1000 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/udtf_explode.q.out b/ql/src/test/results/clientpositive/udtf_explode.q.out index abf5031..8206e33 100644 --- a/ql/src/test/results/clientpositive/udtf_explode.q.out +++ b/ql/src/test/results/clientpositive/udtf_explode.q.out @@ -310,7 +310,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE Select Operator expressions: expr: _col0 @@ -319,14 +319,14 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -624,7 +624,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE Select Operator expressions: expr: _col0 @@ -635,14 +635,14 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1, _col2 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 2906 basicStatsState: PARTIAL colStatsState: COMPLETE + numRows: 0 dataSize: 0 basicStatsState: NONE colStatsState: COMPLETE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/union22.q.out b/ql/src/test/results/clientpositive/union22.q.out index 95021f9..6dbf14a 100644 --- a/ql/src/test/results/clientpositive/union22.q.out +++ 
b/ql/src/test/results/clientpositive/union22.q.out @@ -334,7 +334,7 @@ STAGE PLANS: GatherStats: false Union Statistics: - numRows: 31500 dataSize: 1727000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 348 dataSize: 9684 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -347,7 +347,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 31500 dataSize: 1727000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 348 dataSize: 9684 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 @@ -355,7 +355,7 @@ STAGE PLANS: NumFilesPerFileSink: 1 Static Partition Specification: ds=2/ Statistics: - numRows: 31500 dataSize: 1727000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 348 dataSize: 9684 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -388,7 +388,7 @@ STAGE PLANS: expr: (k0 <= 50) type: boolean Statistics: - numRows: 250 dataSize: 8250 basicStatsState: COMPLETE colStatsState: NONE + numRows: 166 dataSize: 5622 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: k1 @@ -401,10 +401,10 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 250 dataSize: 8250 basicStatsState: COMPLETE colStatsState: NONE + numRows: 166 dataSize: 5622 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 31500 dataSize: 1727000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 348 dataSize: 9684 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -417,7 +417,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 31500 dataSize: 1727000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 348 dataSize: 9684 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 1 @@ -425,7 +425,7 @@ STAGE PLANS: NumFilesPerFileSink: 1 Static Partition Specification: ds=2/ Statistics: - numRows: 31500 dataSize: 1727000 basicStatsState: COMPLETE colStatsState: NONE + numRows: 348 dataSize: 9684 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -558,7 +558,7 @@ STAGE PLANS: expr: (k1 > 20) type: boolean Statistics: - numRows: 250 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE + numRows: 166 dataSize: 3693 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: k1 @@ -568,7 +568,7 @@ STAGE PLANS: expr: k1 type: string Statistics: - numRows: 250 dataSize: 5500 basicStatsState: COMPLETE colStatsState: NONE + numRows: 166 dataSize: 3693 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: k1 @@ -589,7 +589,7 @@ STAGE PLANS: expr: ((k0 > 50) and (k1 > 20)) type: boolean Statistics: - numRows: 250 dataSize: 8250 basicStatsState: COMPLETE colStatsState: NONE + numRows: 55 dataSize: 1862 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: k1 @@ -600,7 +600,7 @@ STAGE PLANS: type: string outputColumnNames: _col1, _col3, _col4 Statistics: - numRows: 250 dataSize: 8250 basicStatsState: COMPLETE colStatsState: NONE + numRows: 55 dataSize: 1862 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col1 @@ -610,7 +610,7 @@ STAGE 
PLANS: expr: _col1 type: string Statistics: - numRows: 250 dataSize: 8250 basicStatsState: COMPLETE colStatsState: NONE + numRows: 55 dataSize: 1862 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: _col3 @@ -721,7 +721,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col1, _col10, _col11 Statistics: - numRows: 31250 dataSize: 1718750 basicStatsState: COMPLETE colStatsState: NONE + numRows: 182 dataSize: 4062 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -734,7 +734,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 31250 dataSize: 1718750 basicStatsState: COMPLETE colStatsState: NONE + numRows: 182 dataSize: 4062 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 diff --git a/ql/src/test/results/clientpositive/union24.q.out b/ql/src/test/results/clientpositive/union24.q.out index 06cbd19..94d820d 100644 --- a/ql/src/test/results/clientpositive/union24.q.out +++ b/ql/src/test/results/clientpositive/union24.q.out @@ -74,14 +74,14 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key type: string outputColumnNames: key Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count(1) @@ -92,7 +92,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1 Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -102,7 +102,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col1 @@ -166,7 +166,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 77 dataSize: 308 basicStatsState: COMPLETE colStatsState: NONE + numRows: 51 dataSize: 244 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -175,7 +175,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 77 dataSize: 308 basicStatsState: COMPLETE colStatsState: NONE + numRows: 51 dataSize: 244 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -202,7 +202,7 @@ STAGE PLANS: GatherStats: false Union Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -211,7 +211,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -220,7 +220,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value 
expressions: expr: _col0 @@ -239,7 +239,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -248,10 +248,10 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -260,7 +260,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -269,7 +269,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -288,7 +288,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -297,10 +297,10 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -309,7 +309,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -318,7 +318,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -337,7 +337,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -346,10 +346,10 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -358,7 +358,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE Reduce Output 
Operator key expressions: expr: _col0 @@ -367,7 +367,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -532,14 +532,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 539 dataSize: 2156 basicStatsState: COMPLETE colStatsState: NONE + numRows: 360 dataSize: 1726 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -661,7 +661,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -671,7 +671,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: expr: key @@ -688,7 +688,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -698,7 +698,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE tag: 1 value expressions: expr: count @@ -804,7 +804,7 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0, _col5 Statistics: - numRows: 11858 dataSize: 94864 basicStatsState: COMPLETE colStatsState: NONE + numRows: 113 dataSize: 543 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -813,7 +813,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 11858 dataSize: 94864 basicStatsState: COMPLETE colStatsState: NONE + numRows: 113 dataSize: 543 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -840,7 +840,7 @@ STAGE PLANS: GatherStats: false Union Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -849,7 +849,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -858,7 +858,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -877,7 +877,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + 
numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -886,10 +886,10 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -898,7 +898,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -907,7 +907,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -926,7 +926,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -935,10 +935,10 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -947,7 +947,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -956,7 +956,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1078,14 +1078,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 12166 dataSize: 96096 basicStatsState: COMPLETE colStatsState: NONE + numRows: 319 dataSize: 1531 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -1198,7 +1198,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -1208,7 +1208,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE tag: 0 value expressions: 
expr: key @@ -1225,7 +1225,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: key @@ -1235,7 +1235,7 @@ STAGE PLANS: expr: key type: string Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE tag: 1 Path -> Alias: #### A masked pattern was here #### @@ -1338,14 +1338,14 @@ STAGE PLANS: handleSkewJoin: false outputColumnNames: _col0 Statistics: - numRows: 11858 dataSize: 94864 basicStatsState: COMPLETE colStatsState: NONE + numRows: 113 dataSize: 543 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 type: string outputColumnNames: _col0 Statistics: - numRows: 11858 dataSize: 94864 basicStatsState: COMPLETE colStatsState: NONE + numRows: 113 dataSize: 543 basicStatsState: COMPLETE colStatsState: NONE Group By Operator aggregations: expr: count(1) @@ -1356,7 +1356,7 @@ STAGE PLANS: mode: hash outputColumnNames: _col0, _col1 Statistics: - numRows: 11858 dataSize: 94864 basicStatsState: COMPLETE colStatsState: NONE + numRows: 113 dataSize: 543 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -1390,7 +1390,7 @@ STAGE PLANS: expr: _col0 type: string Statistics: - numRows: 11858 dataSize: 94864 basicStatsState: COMPLETE colStatsState: NONE + numRows: 113 dataSize: 543 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col1 @@ -1432,7 +1432,7 @@ STAGE PLANS: mode: mergepartial outputColumnNames: _col0, _col1 Statistics: - numRows: 5929 dataSize: 47432 basicStatsState: COMPLETE colStatsState: NONE + numRows: 56 dataSize: 269 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1441,7 +1441,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 5929 dataSize: 47432 basicStatsState: COMPLETE colStatsState: NONE + numRows: 56 dataSize: 269 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -1468,7 +1468,7 @@ STAGE PLANS: GatherStats: false Union Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1477,7 +1477,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1486,7 +1486,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1505,7 +1505,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -1514,10 +1514,10 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 
dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1526,7 +1526,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1535,7 +1535,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1554,7 +1554,7 @@ STAGE PLANS: expr: (key < 10) type: boolean Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -1563,10 +1563,10 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 154 dataSize: 616 basicStatsState: COMPLETE colStatsState: NONE + numRows: 103 dataSize: 494 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -1575,7 +1575,7 @@ STAGE PLANS: type: bigint outputColumnNames: _col0, _col1 Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -1584,7 +1584,7 @@ STAGE PLANS: type: bigint sort order: ++ Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -1706,14 +1706,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 6237 dataSize: 48664 basicStatsState: COMPLETE colStatsState: NONE + numRows: 262 dataSize: 1257 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat diff --git a/ql/src/test/results/clientpositive/union_ppr.q.out b/ql/src/test/results/clientpositive/union_ppr.q.out index 7550110..38b8439 100644 --- a/ql/src/test/results/clientpositive/union_ppr.q.out +++ b/ql/src/test/results/clientpositive/union_ppr.q.out @@ -31,7 +31,7 @@ STAGE PLANS: TableScan alias: x Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -39,7 +39,7 @@ STAGE PLANS: expr: (key < 100) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -52,10 +52,10 @@ 
STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -68,7 +68,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -81,7 +81,7 @@ STAGE PLANS: type: string sort order: ++++ Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -96,7 +96,7 @@ STAGE PLANS: TableScan alias: y Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 58 dataSize: 11624 basicStatsState: COMPLETE colStatsState: NONE GatherStats: false Filter Operator isSamplingPred: false @@ -104,7 +104,7 @@ STAGE PLANS: expr: (key < 100) type: boolean Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: key @@ -117,10 +117,10 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 5812 basicStatsState: PARTIAL colStatsState: NONE + numRows: 19 dataSize: 3807 basicStatsState: COMPLETE colStatsState: NONE Union Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE Select Operator expressions: expr: _col0 @@ -133,7 +133,7 @@ STAGE PLANS: type: string outputColumnNames: _col0, _col1, _col2, _col3 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE Reduce Output Operator key expressions: expr: _col0 @@ -146,7 +146,7 @@ STAGE PLANS: type: string sort order: ++++ Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE tag: -1 value expressions: expr: _col0 @@ -251,14 +251,14 @@ STAGE PLANS: Reduce Operator Tree: Extract Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 Statistics: - numRows: 0 dataSize: 11624 basicStatsState: PARTIAL colStatsState: NONE + numRows: 38 dataSize: 7614 basicStatsState: COMPLETE colStatsState: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat
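The golden-file changes above are all consequences of the new estimation rules introduced in this patch: when basic table stats are missing, row counts are now derived from the file size and a schema-based average row size, and filters, joins and limits then rescale those estimates instead of propagating zeros and PARTIAL/NONE states. The following self-contained Java sketch is illustrative only -- the method names, the 1/3 range-predicate selectivity and the per-operator rounding are assumptions inferred from these outputs rather than Hive's actual StatsUtils/StatsRulesProcFactory code -- but it reproduces the representative numbers seen in the diffs.

// Illustrative sketch only; not Hive's real stats API. Constants mirror the
// new HiveConf defaults added in this patch.
public final class StatsEstimationSketch {

  static final float DESERIALIZATION_FACTOR = 1.0f; // hive.stats.deserialization.factor
  static final float JOIN_FACTOR = 1.1f;            // hive.stats.join.factor

  // Table scan: with no basic stats, derive row count from file size and an
  // average row size computed from the schema (value assumed here).
  static long estimateNumRows(long fileSize, long avgRowSizeFromSchema) {
    long rawDataSize = (long) (fileSize * DESERIALIZATION_FACTOR);
    return Math.max(1, rawDataSize / Math.max(1, avgRowSizeFromSchema));
  }

  // Filter without column stats: heuristic selectivity, assumed 1/3 of the
  // input rows for a single range predicate.
  static long applyRangePredicate(long inputRows) {
    return inputRows / 3;
  }

  // Join without column stats: scale the larger input by the join factor.
  static long estimateJoinRows(long leftRows, long rightRows) {
    return (long) (Math.max(leftRows, rightRows) * JOIN_FACTOR);
  }

  // Limit: emit at most 'limit' rows and shrink data size proportionally.
  static long[] applyLimit(long inputRows, long inputDataSize, long limit) {
    long rows = Math.min(inputRows, limit);
    long avgRowSize = inputRows > 0 ? inputDataSize / inputRows : 0;
    return new long[] { rows, rows * avgRowSize };
  }

  public static void main(String[] args) {
    System.out.println(estimateNumRows(5812, 100)); // 58, cf. "numRows: 58 dataSize: 5812"
    System.out.println(applyRangePredicate(58));    // 19, cf. the transform_ppr2 filter output
    System.out.println(estimateJoinRows(500, 500)); // 550, cf. the smb_mapjoin_13 join output
  }
}

Under these assumptions the diffs line up: a 5812-byte src file with an assumed ~100-byte schema row size gives the "numRows: 58" table-scan entries (dataSize stays at the on-disk size because the deserialization factor defaults to 1.0); a range predicate over those 58 rows gives the 19-row filter outputs, with dataSize recomputed from the ~200-byte average row size (19 x ~200 ≈ 3807); and two roughly 500-row bucketed inputs joined under the 1.1 join factor give the 550-row join estimates, in place of the old 125000-row cross-product guesses.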