diff --git a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
index 41d150c..1466b69 100644
--- a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
+++ b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java
@@ -222,16 +222,6 @@ public static void setBasicStatsState(Map<String, String> params, String setting
       // old format of statsAcc, e.g., TRUE or FALSE
       LOG.debug("In StatsSetupConst, JsonParser can not parse statsAcc.");
       stats = new JSONObject(new LinkedHashMap<String, Object>());
-      try {
-        if (statsAcc.equals(TRUE)) {
-          stats.put(BASIC_STATS, TRUE);
-        } else {
-          stats.put(BASIC_STATS, FALSE);
-        }
-      } catch (JSONException e1) {
-        // impossible to throw any json exceptions.
-        LOG.trace(e1.getMessage());
-      }
     }
     if (!stats.has(BASIC_STATS)) {
       // duplicate key is not possible
@@ -332,4 +322,13 @@ public static void clearColumnStatsState(Map<String, String> params) {
       params.put(COLUMN_STATS_ACCURATE, stats.toString());
     }
   }
+
+  public static void setBasicStatsStateForCreateTable(Map<String, String> params, String setting) {
+    if (TRUE.equals(setting)) {
+      for (String stat : StatsSetupConst.supportedStats) {
+        params.put(stat, "0");
+      }
+    }
+    setBasicStatsState(params, setting);
+  }
 }
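For context: COLUMN_STATS_ACCURATE now holds a small JSON document such as {"BASIC_STATS":"true"} (the shape that appears throughout the golden-file updates below), which is why the old bare TRUE/FALSE fallback can be deleted, and the new setBasicStatsStateForCreateTable lets a freshly created, necessarily empty table start with trusted zero stats. A self-contained sketch of that behavior; the class and the inlined JSON write are illustrative, only the constant values mirror StatsSetupConst:

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Standalone sketch of the new create-table stats bootstrap. Illustration only,
// not the patched class itself.
public class CreateTableStatsSketch {
  static final String TRUE = "true";
  // Mirrors StatsSetupConst.supportedStats: the four basic counters.
  static final String[] SUPPORTED_STATS = {"numFiles", "numRows", "totalSize", "rawDataSize"};

  // A brand-new table without a LOCATION is exactly empty, so every basic stat
  // is 0 and can immediately be marked accurate.
  static void setBasicStatsStateForCreateTable(Map<String, String> params, String setting) {
    if (TRUE.equals(setting)) {
      for (String stat : SUPPORTED_STATS) {
        params.put(stat, "0");
      }
    }
    // The real method delegates to setBasicStatsState, which stores the flag
    // as a JSON document; written inline here to keep the sketch standalone.
    params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"" + setting + "\"}");
  }

  public static void main(String[] args) {
    Map<String, String> params = new LinkedHashMap<String, String>();
    setBasicStatsStateForCreateTable(params, TRUE);
    System.out.println(params);
    // {numFiles=0, numRows=0, totalSize=0, rawDataSize=0, COLUMN_STATS_ACCURATE={"BASIC_STATS":"true"}}
  }
}
```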
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index c046708..76c1636 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -103,7 +103,7 @@ protected void setUp() {
         db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
         db.createTable(src, cols, null, TextInputFormat.class,
             IgnoreKeyTextOutputFormat.class);
-        db.loadTable(hadoopDataFile[i], src, false, false, false, false);
+        db.loadTable(hadoopDataFile[i], src, false, false, false, false, false);
         i++;
       }
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index 76220f4..da3da8b 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -265,6 +265,13 @@ public static void populateQuickStats(FileStatus[] fileStatus,
       Map<String, String> parameters = tTable.getParameters();
+      // In the following scenarios, we need to reset the stats to true:
+      // work.getTableSpecs() != null means an ANALYZE command;
+      // work.getLoadTableDesc().getReplace() == true means an INSERT OVERWRITE command, etc.
+      if (work.getTableSpecs() != null
+          || work.getLoadTableDesc().getReplace()) {
+        StatsSetupConst.setBasicStatsState(parameters, StatsSetupConst.TRUE);
+      }
       // non-partitioned tables:
       if (!existStats(parameters) && atomic) {
         return 0;
@@ -171,20 +178,22 @@ private int aggregateStats(Hive db) {
         // The collectable stats for the aggregator needs to be cleared.
         // For eg. if a file is being loaded, the old number of rows are not valid
         if (work.isClearAggregatorStats()) {
-          clearStats(parameters);
-        }
-
-        if (statsAggregator != null) {
-          String prefix = getAggregationPrefix(table, null);
-          updateStats(statsAggregator, parameters, prefix, atomic);
+          // we choose to keep the invalid stats and only change the setting.
+          StatsSetupConst.setBasicStatsState(parameters, StatsSetupConst.FALSE);
         }
 
         updateQuickStats(wh, parameters, tTable.getSd());
-
-        // write table stats to metastore
-        if (!getWork().getNoStatsAggregator()) {
-          environmentContext = new EnvironmentContext();
-          environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK);
+        if (StatsSetupConst.areBasicStatsUptoDate(parameters)) {
+          if (statsAggregator != null) {
+            String prefix = getAggregationPrefix(table, null);
+            updateStats(statsAggregator, parameters, prefix, atomic);
+          }
+          // write table stats to metastore
+          if (!getWork().getNoStatsAggregator()) {
+            environmentContext = new EnvironmentContext();
+            environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED,
+                StatsSetupConst.TASK);
+          }
         }
 
         getHive().alterTable(tableFullName, new Table(tTable), environmentContext);
@@ -203,6 +212,10 @@ private int aggregateStats(Hive db) {
           //
           org.apache.hadoop.hive.metastore.api.Partition tPart = partn.getTPartition();
           Map<String, String> parameters = tPart.getParameters();
+          if (work.getTableSpecs() != null
+              || work.getLoadTableDesc().getReplace()) {
+            StatsSetupConst.setBasicStatsState(parameters, StatsSetupConst.TRUE);
+          }
           if (!existStats(parameters) && atomic) {
             continue;
           }
@@ -210,20 +223,21 @@ private int aggregateStats(Hive db) {
           // The collectable stats for the aggregator needs to be cleared.
           // For eg. if a file is being loaded, the old number of rows are not valid
           if (work.isClearAggregatorStats()) {
-            clearStats(parameters);
-          }
-
-          if (statsAggregator != null) {
-            String prefix = getAggregationPrefix(table, partn);
-            updateStats(statsAggregator, parameters, prefix, atomic);
+            // we choose to keep the invalid stats and only change the setting.
+            StatsSetupConst.setBasicStatsState(parameters, StatsSetupConst.FALSE);
           }
 
           updateQuickStats(wh, parameters, tPart.getSd());
-
-          if (!getWork().getNoStatsAggregator()) {
-            environmentContext = new EnvironmentContext();
-            environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED,
-                StatsSetupConst.TASK);
+          if (StatsSetupConst.areBasicStatsUptoDate(parameters)) {
+            if (statsAggregator != null) {
+              String prefix = getAggregationPrefix(table, partn);
+              updateStats(statsAggregator, parameters, prefix, atomic);
+            }
+            if (!getWork().getNoStatsAggregator()) {
+              environmentContext = new EnvironmentContext();
+              environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED,
+                  StatsSetupConst.TASK);
+            }
           }
 
           updates.add(new Partition(table, tPart));
@@ -346,14 +360,6 @@ private void updateQuickStats(Warehouse wh, Map<String, String> parameters,
     MetaStoreUtils.populateQuickStats(partfileStatus, parameters);
   }
 
-  private void clearStats(Map<String, String> parameters) {
-    for (String statType : StatsSetupConst.supportedStats) {
-      if (parameters.containsKey(statType)) {
-        parameters.remove(statType);
-      }
-    }
-  }
-
   private String toString(Map<String, String> parameters) {
     StringBuilder builder = new StringBuilder();
     for (String statType : StatsSetupConst.supportedStats) {
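In the StatsTask hunks above (aggregateStats and its partition loop, judging from the method context), the behavioral change is: instead of deleting stale counters via the removed clearStats, the task now keeps them and merely flips the accuracy flag, and it runs the aggregator and attaches STATS_GENERATED=TASK only when basic stats are up to date. A runnable toy of that control flow; every helper here is a stand-in, not the real StatsTask code:

```java
import java.util.HashMap;
import java.util.Map;

// Condensed toy of the patched aggregateStats() flow for one table/partition.
public class StatsTaskFlowSketch {

  // Stand-in for updateQuickStats(): numFiles/totalSize can always be
  // recomputed from the file system, independent of the accuracy flag.
  static void updateQuickStats(Map<String, String> params) {
    params.put("numFiles", "1");
    params.put("totalSize", "5812");
  }

  // Stand-in for StatsSetupConst.areBasicStatsUptoDate().
  static boolean areBasicStatsUptoDate(Map<String, String> params) {
    String flag = params.get("COLUMN_STATS_ACCURATE");
    return flag != null && flag.contains("\"BASIC_STATS\":\"true\"");
  }

  static void aggregateOne(Map<String, String> params, boolean clearAggregatorStats) {
    if (clearAggregatorStats) {
      // Old behavior: clearStats(params) removed numRows/rawDataSize outright.
      // New behavior: keep the stale values and only mark them inaccurate.
      params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"false\"}");
    }
    updateQuickStats(params);
    if (areBasicStatsUptoDate(params)) {
      // Only on this path does the real task run the aggregator (updateStats)
      // and attach the STATS_GENERATED=TASK context to alterTable/alterPartition.
    }
  }

  public static void main(String[] args) {
    Map<String, String> params = new HashMap<String, String>();
    params.put("numRows", "500");
    params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
    aggregateOne(params, true);
    System.out.println(params); // numRows survives, but the flag is now false
  }
}
```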
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 4c9acce..cef1c4b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -1434,10 +1434,10 @@ public Database getDatabaseCurrent() throws HiveException {
   public void loadPartition(Path loadPath, String tableName,
       Map<String, String> partSpec, boolean replace, boolean inheritTableSpecs,
       boolean isSkewedStoreAsSubdir,
-      boolean isSrcLocal, boolean isAcid) throws HiveException {
+      boolean isSrcLocal, boolean isAcid, boolean hasFollowingStatsTask) throws HiveException {
     Table tbl = getTable(tableName);
     loadPartition(loadPath, tbl, partSpec, replace, inheritTableSpecs,
-        isSkewedStoreAsSubdir, isSrcLocal, isAcid);
+        isSkewedStoreAsSubdir, isSrcLocal, isAcid, hasFollowingStatsTask);
   }
 
   /**
@@ -1464,7 +1464,7 @@ public void loadPartition(Path loadPath, String tableName,
   public Partition loadPartition(Path loadPath, Table tbl,
       Map<String, String> partSpec, boolean replace,
       boolean inheritTableSpecs, boolean isSkewedStoreAsSubdir,
-      boolean isSrcLocal, boolean isAcid) throws HiveException {
+      boolean isSrcLocal, boolean isAcid, boolean hasFollowingStatsTask) throws HiveException {
     Path tblDataLocationPath = tbl.getDataLocation();
     try {
       /**
@@ -1543,10 +1543,19 @@ public Partition loadPartition(Path loadPath, Table tbl,
       }
       if (oldPart == null) {
         newTPart.getTPartition().setParameters(new HashMap<String, String>());
+        if (this.getConf().getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+          StatsSetupConst.setBasicStatsStateForCreateTable(newTPart.getParameters(),
+              StatsSetupConst.TRUE);
+        }
         MetaStoreUtils.populateQuickStats(HiveStatsUtils.getFileStatusRecurse(newPartPath, -1,
             newPartPath.getFileSystem(conf)), newTPart.getParameters());
         getMSC().add_partition(newTPart.getTPartition());
       } else {
-        alterPartition(tbl.getDbName(), tbl.getTableName(), new Partition(tbl, newTPart.getTPartition()), null);
+        EnvironmentContext environmentContext = null;
+        if (hasFollowingStatsTask) {
+          environmentContext = new EnvironmentContext();
+          environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
+        }
+        alterPartition(tbl.getDbName(), tbl.getTableName(), new Partition(tbl, newTPart.getTPartition()), environmentContext);
       }
       return newTPart;
     } catch (IOException e) {
@@ -1664,7 +1673,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
    */
   public Map<Map<String, String>, Partition> loadDynamicPartitions(Path loadPath,
       String tableName, Map<String, String> partSpec, boolean replace,
-      int numDP, boolean listBucketingEnabled, boolean isAcid, long txnId)
+      int numDP, boolean listBucketingEnabled, boolean isAcid, long txnId, boolean hasFollowingStatsTask)
       throws HiveException {
 
     Set<Path> validPartitions = new HashSet<Path>();
@@ -1714,7 +1723,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
         LinkedHashMap<String, String> fullPartSpec = new LinkedHashMap<String, String>(partSpec);
         Warehouse.makeSpecFromName(fullPartSpec, partPath);
         Partition newPartition = loadPartition(partPath, tbl, fullPartSpec, replace,
-            true, listBucketingEnabled, false, isAcid);
+            true, listBucketingEnabled, false, isAcid, hasFollowingStatsTask);
         partitionsMap.put(fullPartSpec, newPartition);
         if (inPlaceEligible) {
           InPlaceUpdates.rePositionCursor(ps);
@@ -1753,10 +1762,12 @@ private void constructOneLBLocationMap(FileStatus fSta,
    *          If the source directory is LOCAL
    * @param isSkewedStoreAsSubdir
    *          if list bucketing enabled
+   * @param hasFollowingStatsTask
+   *          true if a stats task follows this load
    * @param isAcid true if this is an ACID based write
    */
-  public void loadTable(Path loadPath, String tableName, boolean replace,
-      boolean isSrcLocal, boolean isSkewedStoreAsSubdir, boolean isAcid)
+  public void loadTable(Path loadPath, String tableName, boolean replace, boolean isSrcLocal,
+      boolean isSkewedStoreAsSubdir, boolean isAcid, boolean hasFollowingStatsTask)
       throws HiveException {
 
     List<Path> newFiles = null;
@@ -1798,8 +1809,13 @@ public void loadTable(Path loadPath, String tableName, boolean replace,
       throw new HiveException(e);
     }
 
+    EnvironmentContext environmentContext = null;
+    if (hasFollowingStatsTask) {
+      environmentContext = new EnvironmentContext();
+      environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
+    }
     try {
-      alterTable(tableName, tbl, null);
+      alterTable(tableName, tbl, environmentContext);
     } catch (InvalidOperationException e) {
       throw new HiveException(e);
     }
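The thread through the Hive.java changes is the new hasFollowingStatsTask flag: when a StatsTask is queued after a load, the alterTable/alterPartition call carries a DO_NOT_UPDATE_STATS hint so the metastore does not eagerly recompute basic stats that the stats task will overwrite a moment later. A minimal sketch of that hand-off; MiniEnvironmentContext is a stand-in for the metastore's EnvironmentContext, and buildLoadContext is a made-up helper:

```java
import java.util.HashMap;
import java.util.Map;

// Sketch of the load-path hint: a non-null context with DO_NOT_UPDATE_STATS=true
// tells the metastore to skip its own fast stats update during the alter call.
public class LoadStatsHintSketch {
  static class MiniEnvironmentContext {
    final Map<String, String> properties = new HashMap<String, String>();
    void putToProperties(String k, String v) { properties.put(k, v); }
  }

  static MiniEnvironmentContext buildLoadContext(boolean hasFollowingStatsTask) {
    if (!hasFollowingStatsTask) {
      return null; // no hint: the metastore may update basic stats during alter
    }
    MiniEnvironmentContext ctx = new MiniEnvironmentContext();
    ctx.putToProperties("DO_NOT_UPDATE_STATS", "true");
    return ctx;
  }

  public static void main(String[] args) {
    System.out.println(buildLoadContext(true).properties); // {DO_NOT_UPDATE_STATS=true}
  }
}
```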
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index 549d24f..bba6463 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -286,7 +286,7 @@ private AddPartitionDesc getBaseAddPartitionDescFromPartition(
   private CreateTableDesc getBaseCreateTableDescFromTable(String dbName,
       org.apache.hadoop.hive.metastore.api.Table table) {
     if ((table.getPartitionKeys() == null) || (table.getPartitionKeys().size() == 0)){
-      table.putToParameters(StatsSetupConst.DO_NOT_UPDATE_STATS,"true");
+      table.putToParameters(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
     }
     CreateTableDesc tblDesc = new CreateTableDesc(
         dbName,
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
index 03b4d8b..236d026 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
@@ -25,6 +25,7 @@ import java.util.Map;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -777,7 +778,17 @@ public Table toTable(HiveConf conf) throws HiveException {
         }
       }
     }
+    if (getLocation() == null && !this.isCTAS) {
+      if (!tbl.isPartitioned() && conf.getBoolVar(HiveConf.ConfVars.HIVESTATSAUTOGATHER)) {
+        StatsSetupConst.setBasicStatsStateForCreateTable(tbl.getTTable().getParameters(),
+            StatsSetupConst.TRUE);
+      }
+    } else {
+      StatsSetupConst.setBasicStatsStateForCreateTable(tbl.getTTable().getParameters(),
+          StatsSetupConst.FALSE);
+    }
     return tbl;
   }
 
 }
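CreateTableDesc.toTable now decides at plan time whether the new table may start with trusted zero stats: only when no LOCATION was given and the statement is not a CTAS is the table guaranteed empty, and for non-partitioned tables this additionally requires hive.stats.autogather. A truth-table sketch of that branch; the enum and free function are made up for illustration:

```java
// Decision sketch for the new branch in CreateTableDesc.toTable().
public class CreateTableStatsDecision {
  enum StatsState { ACCURATE_ZERO, INACCURATE, UNTOUCHED }

  static StatsState decide(boolean hasLocation, boolean isCTAS,
                           boolean isPartitioned, boolean autogather) {
    if (!hasLocation && !isCTAS) {
      // Freshly created table: guaranteed empty at this point.
      if (!isPartitioned && autogather) {
        return StatsState.ACCURATE_ZERO; // seed all counters with 0, BASIC_STATS=true
      }
      return StatsState.UNTOUCHED; // partitions get their own state when loaded/added
    }
    // A LOCATION may already hold data, and CTAS fills the table afterwards:
    return StatsState.INACCURATE; // BASIC_STATS=false
  }

  public static void main(String[] args) {
    System.out.println(decide(false, false, false, true)); // ACCURATE_ZERO
    System.out.println(decide(true, false, false, true));  // INACCURATE
  }
}
```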
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
index eaeb66b..95d00f2 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestExecDriver.java
@@ -141,7 +141,7 @@
       db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
       db.createTable(src, cols, null, TextInputFormat.class,
           HiveIgnoreKeyTextOutputFormat.class);
-      db.loadTable(hadoopDataFile[i], src, false, true, false, false);
+      db.loadTable(hadoopDataFile[i], src, false, true, false, false, false);
       i++;
     }
diff --git a/ql/src/test/queries/clientpositive/insert_values_orig_table_use_metadata.q b/ql/src/test/queries/clientpositive/insert_values_orig_table_use_metadata.q
new file mode 100644
index 0000000..73f5243
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/insert_values_orig_table_use_metadata.q
@@ -0,0 +1,121 @@
+set hive.support.concurrency=true;
+set hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
+set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
+set hive.compute.query.using.stats=true;
+
+create table acid_ivot(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+
+desc formatted acid_ivot;
+
+LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot;
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true);
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+drop table acid_ivot;
+
+create table acid_ivot(
+    ctinyint TINYINT,
+    csmallint SMALLINT,
+    cint INT,
+    cbigint BIGINT,
+    cfloat FLOAT,
+    cdouble DOUBLE,
+    cstring1 STRING,
+    cstring2 STRING,
+    ctimestamp1 TIMESTAMP,
+    ctimestamp2 TIMESTAMP,
+    cboolean1 BOOLEAN,
+    cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true');
+
+insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true);
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+insert into table acid_ivot values
+        (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true),
+        (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true);
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot;
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+drop table acid_ivot;
+
+create table acid_ivot like src;
+
+desc formatted acid_ivot;
+
+insert overwrite table acid_ivot select * from src;
+
+desc formatted acid_ivot;
+
+explain select count(*) from acid_ivot;
+
+select count(*) from acid_ivot;
+
+CREATE TABLE sp (key STRING COMMENT 'default', value STRING COMMENT 'default')
+PARTITIONED BY (ds STRING, hr STRING)
+STORED AS TEXTFILE;
+
+LOAD DATA LOCAL INPATH "../../data/files/kv1.txt"
+OVERWRITE INTO TABLE sp PARTITION (ds="2008-04-08", hr="11");
+
+desc formatted sp PARTITION (ds="2008-04-08", hr="11");
+
+explain select count(*) from sp where ds="2008-04-08" and hr="11";
+
+select count(*) from sp where ds="2008-04-08" and hr="11";
+
+insert into table sp PARTITION (ds="2008-04-08", hr="11") values
+        ('1', '2'), ('3', '4');
+
+desc formatted sp PARTITION (ds="2008-04-08", hr="11");
+
+analyze table sp PARTITION (ds="2008-04-08", hr="11") compute statistics;
+
+desc formatted sp PARTITION (ds="2008-04-08", hr="11");
+
+explain select count(*) from sp where ds="2008-04-08" and hr="11";
+
+select count(*) from sp where ds="2008-04-08" and hr="11";
+
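The new insert_values_orig_table_use_metadata.q test pins down the user-visible effect: with hive.compute.query.using.stats=true, a plain count(*) should be answered from numRows exactly while BASIC_STATS is true, and must fall back to a scan after a LOAD DATA (whose row count is unknown) flips the flag to false, until ANALYZE ... COMPUTE STATISTICS restores it. A toy model of that invariant; this is not Hive's actual StatsOptimizer:

```java
import java.util.HashMap;
import java.util.Map;

// Toy model of the invariant the test checks: count(*) may be served from
// table/partition metadata only while basic stats are marked accurate.
public class MetadataCountSketch {
  static Long countFromMetadata(Map<String, String> params) {
    String acc = params.get("COLUMN_STATS_ACCURATE");
    if (acc == null || !acc.contains("\"BASIC_STATS\":\"true\"")) {
      return null; // stats not trustworthy: fall back to a real scan
    }
    return Long.valueOf(params.get("numRows"));
  }

  public static void main(String[] args) {
    Map<String, String> params = new HashMap<String, String>();
    params.put("numRows", "2");
    params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
    System.out.println(countFromMetadata(params)); // 2
    params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"false\"}"); // e.g. after LOAD DATA
    System.out.println(countFromMetadata(params)); // null -> must scan
  }
}
```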
diff --git a/ql/src/test/queries/clientpositive/stats20.q b/ql/src/test/queries/clientpositive/stats20.q index 59701bd..79fd2b8 100644 --- a/ql/src/test/queries/clientpositive/stats20.q +++ b/ql/src/test/queries/clientpositive/stats20.q @@ -7,10 +7,12 @@ insert overwrite table stats_partitioned partition (ds='1') select * from src; -- rawDataSize is 5312 after config is turned on describe formatted stats_partitioned; +describe formatted stats_partitioned partition (ds='1'); set hive.stats.collect.rawdatasize=false; insert overwrite table stats_partitioned partition (ds='1') select * from src; -- rawDataSize is 0 after config is turned off describe formatted stats_partitioned; +describe formatted stats_partitioned partition (ds='1'); diff --git a/ql/src/test/results/clientnegative/alter_file_format.q.out b/ql/src/test/results/clientnegative/alter_file_format.q.out index 96f1bfb..e3f3b4c 100644 --- a/ql/src/test/results/clientnegative/alter_file_format.q.out +++ b/ql/src/test/results/clientnegative/alter_file_format.q.out @@ -24,6 +24,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out b/ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out index 4faf327..e4e4d38 100644 --- a/ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out +++ b/ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out @@ -66,7 +66,10 @@ Database: default Table: analyze_srcpart_partial_scan #### A masked pattern was here #### Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 + numRows 0 + rawDataSize 0 totalSize 5077 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientnegative/unset_table_property.q.out b/ql/src/test/results/clientnegative/unset_table_property.q.out index 0510788..0705b92 100644 --- a/ql/src/test/results/clientnegative/unset_table_property.q.out +++ b/ql/src/test/results/clientnegative/unset_table_property.q.out @@ -22,6 +22,8 @@ a 1 c 3 #### A masked pattern was here #### numFiles 0 +numRows 0 +rawDataSize 0 totalSize 0 #### A masked pattern was here #### FAILED: SemanticException [Error 10215]: Please use the following syntax if not sure whether the property existed or not: diff --git a/ql/src/test/results/clientpositive/allcolref_in_udf.q.out b/ql/src/test/results/clientpositive/allcolref_in_udf.q.out index eda49ed..792b929 100644 --- a/ql/src/test/results/clientpositive/allcolref_in_udf.q.out +++ b/ql/src/test/results/clientpositive/allcolref_in_udf.q.out @@ -187,20 +187,20 @@ STAGE PLANS: Map Operator Tree: TableScan alias: allcolref - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c0 (type: array<string>) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 5812 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 5812 Basic stats: COMPLETE Column stats: NONE function name: explode Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 5812 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 5812 Basic stats: COMPLETE Column stats:
NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/alter_file_format.q.out b/ql/src/test/results/clientpositive/alter_file_format.q.out index 5d83b23..6b5ea62 100644 --- a/ql/src/test/results/clientpositive/alter_file_format.q.out +++ b/ql/src/test/results/clientpositive/alter_file_format.q.out @@ -24,6 +24,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -64,6 +69,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -105,6 +112,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -146,6 +155,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -187,6 +198,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -228,6 +241,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -269,6 +284,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out b/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out index 3b71598..786a11c 100644 --- a/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out +++ b/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2_h23.q.out @@ -35,6 +35,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -129,6 +134,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -223,6 +234,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -317,6 +334,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -411,6 +434,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + 
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -505,6 +534,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -599,6 +634,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -693,6 +734,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -787,6 +834,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out b/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out index cab3de4..d566c57 100644 --- a/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out +++ b/ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table_h23.q.out @@ -39,6 +39,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -184,6 +190,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -230,6 +242,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -312,6 +330,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -399,6 +423,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -446,6 +476,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE 
{\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -493,6 +529,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -540,6 +582,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out b/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out index 184d2e4..9273d08 100644 --- a/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out +++ b/ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out @@ -190,7 +190,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} SORTBUCKETCOLSPREFIX TRUE + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/alter_partition_coltype.q.out b/ql/src/test/results/clientpositive/alter_partition_coltype.q.out index a184372..641248e 100644 --- a/ql/src/test/results/clientpositive/alter_partition_coltype.q.out +++ b/ql/src/test/results/clientpositive/alter_partition_coltype.q.out @@ -171,17 +171,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype + numFiles 0 + numRows 0 partition_columns dt/ts partition_columns.types int:string + rawDataSize 0 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -217,17 +222,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype + numFiles 0 + numRows 0 partition_columns dt/ts partition_columns.types int:string + rawDataSize 0 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -398,17 +408,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns 
key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype + numFiles 0 + numRows 0 partition_columns dt/ts partition_columns.types string:double + rawDataSize 0 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -557,17 +572,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype + numFiles 0 + numRows 0 partition_columns dt/ts partition_columns.types string:double + rawDataSize 0 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -746,17 +766,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype + numFiles 0 + numRows 0 partition_columns dt/ts partition_columns.types string:double + rawDataSize 0 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -790,17 +815,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.alter_coltype + numFiles 0 + numRows 0 partition_columns dt/ts partition_columns.types string:double + rawDataSize 0 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -1015,17 +1045,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns intcol columns.comments columns.types string #### A masked pattern was here #### name pt.alterdynamic_part_table + numFiles 0 + numRows 0 partition_columns partcol1/partcol2 partition_columns.types int:string + rawDataSize 0 serialization.ddl struct alterdynamic_part_table { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
pt.alterdynamic_part_table @@ -1120,17 +1155,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns intcol columns.comments columns.types string #### A masked pattern was here #### name pt.alterdynamic_part_table + numFiles 0 + numRows 0 partition_columns partcol1/partcol2 partition_columns.types int:string + rawDataSize 0 serialization.ddl struct alterdynamic_part_table { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: pt.alterdynamic_part_table diff --git a/ql/src/test/results/clientpositive/alter_skewed_table.q.out b/ql/src/test/results/clientpositive/alter_skewed_table.q.out index a1caa99..0f60ba3 100644 --- a/ql/src/test/results/clientpositive/alter_skewed_table.q.out +++ b/ql/src/test/results/clientpositive/alter_skewed_table.q.out @@ -24,6 +24,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -64,6 +69,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -119,6 +126,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -159,6 +171,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -208,6 +222,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -250,6 +269,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out b/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out index 6e1ec59..566b804 100644 --- a/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out +++ b/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out @@ -24,7 +24,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} SORTBUCKETCOLSPREFIX TRUE + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -66,6 +71,8 @@ Table Parameters: SORTBUCKETCOLSPREFIX TRUE #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/alter_table_serde2.q.out b/ql/src/test/results/clientpositive/alter_table_serde2.q.out index dd946e5..770fb1d 100644 --- a/ql/src/test/results/clientpositive/alter_table_serde2.q.out +++ 
b/ql/src/test/results/clientpositive/alter_table_serde2.q.out @@ -35,6 +35,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -131,6 +136,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out b/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out index 72a24d5..6155edf 100644 --- a/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out +++ b/ql/src/test/results/clientpositive/analyze_table_null_partition.q.out @@ -144,17 +144,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns name columns.comments columns.types string #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns age partition_columns.types int + rawDataSize 0 serialization.ddl struct test2 { string name} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -187,17 +192,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns name columns.comments columns.types string #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns age partition_columns.types int + rawDataSize 0 serialization.ddl struct test2 { string name} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -230,17 +240,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns name columns.comments columns.types string #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns age partition_columns.types int + rawDataSize 0 serialization.ddl struct test2 { string name} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -273,17 +288,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns name columns.comments columns.types string #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns age partition_columns.types int + rawDataSize 0 serialization.ddl 
struct test2 { string name} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out index f768ea4..940629f 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out @@ -198,8 +198,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -210,6 +212,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -218,11 +221,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -243,8 +250,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -255,6 +264,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -263,11 +273,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -419,8 +433,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -431,6 +447,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -439,11 +456,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name 
default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -464,8 +485,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -476,6 +499,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -484,11 +508,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -617,8 +645,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -629,6 +659,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -637,11 +668,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -719,8 +754,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -731,6 +768,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -739,11 +777,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { 
string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -764,8 +806,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -776,6 +820,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -784,11 +829,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -808,8 +857,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -820,6 +871,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -828,11 +880,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -890,8 +946,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -902,6 +960,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -910,11 +969,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -934,8 +997,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -946,6 +1011,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -954,11 +1020,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1036,8 +1106,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1048,6 +1120,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1056,11 +1129,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1081,8 +1158,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1093,6 +1172,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1101,11 +1181,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1125,8 +1209,10 @@ STAGE PLANS: #### A masked 
pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1137,6 +1223,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1145,11 +1232,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1236,8 +1327,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1248,6 +1341,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1256,11 +1350,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1281,8 +1379,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1293,6 +1393,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1301,11 +1402,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out index f4bbfd0..3ac68cf 100644 --- 
a/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out @@ -165,8 +165,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -177,6 +179,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -184,11 +187,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -269,8 +276,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -281,6 +290,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -288,11 +298,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -313,8 +327,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -325,6 +341,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -332,11 +349,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -357,8 +378,10 @@ STAGE PLANS: #### A 
masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -369,6 +392,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -376,11 +400,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -513,8 +541,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -525,6 +555,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -532,11 +563,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -617,8 +652,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -629,6 +666,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -636,11 +674,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -661,8 +703,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct 
bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -673,6 +717,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -680,11 +725,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -705,8 +754,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -717,6 +768,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -724,11 +776,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -859,8 +915,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -871,6 +929,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -878,11 +937,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -963,8 +1026,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -975,6 +1040,7 @@ STAGE PLANS: 
input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -982,11 +1048,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1007,8 +1077,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1019,6 +1091,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -1026,11 +1099,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1176,8 +1253,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1188,6 +1267,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -1195,11 +1275,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1222,8 +1306,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1234,6 +1320,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -1241,11 +1328,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1264,8 +1355,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1276,6 +1369,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -1283,11 +1377,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1387,8 +1485,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1399,6 +1499,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -1406,11 +1507,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1431,8 +1536,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1443,6 +1550,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -1450,11 +1558,15 @@ STAGE PLANS: columns.types string:string #### 
A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out index e5ff904..f7f56d4 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out @@ -231,8 +231,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -243,6 +245,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -251,11 +254,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -279,8 +286,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_medium numFiles 3 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -291,6 +300,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key @@ -299,11 +309,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_medium + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium @@ -327,8 +341,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_medium numFiles 3 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -339,6 +355,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
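
(Illustrative aside, not part of the patch hunks around it.) Throughout these plans the new COLUMN_STATS_ACCURATE property is a small JSON blob, {"BASIC_STATS":"true"}, stored in the table and partition parameters. A minimal sketch of decoding that flag from a parameter map, assuming a hypothetical helper class (only the two property keys are taken from the plans):

import java.util.HashMap;
import java.util.Map;

import org.json.JSONException;
import org.json.JSONObject;

// Hypothetical helper, not part of this patch: decodes the
// COLUMN_STATS_ACCURATE JSON blob seen in the .q.out plans.
public class BasicStatsCheck {

  static final String COLUMN_STATS_ACCURATE = "COLUMN_STATS_ACCURATE";
  static final String BASIC_STATS = "BASIC_STATS";

  // Returns true only when the JSON value marks basic stats as accurate.
  static boolean basicStatsUpToDate(Map<String, String> params) {
    String statsAcc = params.get(COLUMN_STATS_ACCURATE);
    if (statsAcc == null) {
      return false; // no flag at all, so the stats cannot be trusted
    }
    try {
      JSONObject stats = new JSONObject(statsAcc);
      return "true".equals(stats.optString(BASIC_STATS));
    } catch (JSONException e) {
      return false; // value was not JSON; treat the stats as stale
    }
  }

  public static void main(String[] args) {
    Map<String, String> params = new HashMap<>();
    params.put(COLUMN_STATS_ACCURATE, "{\"BASIC_STATS\":\"true\"}");
    System.out.println(basicStatsUpToDate(params)); // prints: true
  }
}

Under this reading, the numRows and rawDataSize values in the same parameter map are trustworthy only when the flag decodes to true, which is why an all-zero but genuinely empty partition can still be marked accurate.
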
properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key @@ -347,11 +364,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_medium + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium @@ -474,8 +495,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -486,6 +509,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -494,11 +518,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -519,8 +547,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -531,6 +561,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -539,11 +570,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -564,8 +599,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_medium numFiles 3 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -576,6 +613,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 3 bucket_field_name key @@ -584,11 +622,15 @@ STAGE 
PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_medium + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium @@ -609,8 +651,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -621,6 +665,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -629,11 +674,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out index 0de079d..cfd325e 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out @@ -178,8 +178,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -190,6 +192,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -198,11 +201,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -223,8 +230,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -235,6 +244,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -243,11 +253,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -378,8 +392,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -390,6 +406,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -398,11 +415,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -480,8 +501,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -492,6 +515,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -500,11 +524,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -525,8 +553,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -537,6 +567,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE 
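
(Illustrative aside, not part of the patch hunks around it.) The second table listing in each plan pairs all-zero basic stats (numFiles, numRows, rawDataSize, totalSize) with BASIC_STATS "true": zero is treated as an exact count for an object that holds no data yet. A minimal sketch of seeding such defaults, assuming a hypothetical helper (the key names and the JSON value are copied from the plans):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical helper, not part of this patch: seeds the all-zero
// basic stats that the plans above show for brand-new, empty objects.
public class ZeroStatsDefaults {

  static final List<String> BASIC_STAT_KEYS =
      Arrays.asList("numFiles", "numRows", "rawDataSize", "totalSize");

  static Map<String, String> newEmptyObjectParams() {
    Map<String, String> params = new LinkedHashMap<>();
    for (String key : BASIC_STAT_KEYS) {
      params.put(key, "0"); // nothing has been written yet, so 0 is exact
    }
    // Zero rows is an accurate count for an empty object, hence the flag.
    params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
    return params;
  }

  public static void main(String[] args) {
    System.out.println(newEmptyObjectParams());
  }
}
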
bucket_count 2 bucket_field_name key @@ -545,11 +576,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -569,8 +604,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -581,6 +618,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -589,11 +627,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -651,8 +693,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -663,6 +707,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -671,11 +716,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -695,8 +744,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -707,6 +758,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -715,11 +767,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name 
default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -797,8 +853,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -809,6 +867,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -817,11 +876,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -842,8 +905,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -854,6 +919,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -862,11 +928,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -886,8 +956,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -898,6 +970,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -906,11 +979,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string 
key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -997,8 +1074,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1009,6 +1088,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1017,11 +1097,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1042,8 +1126,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1054,6 +1140,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1062,11 +1149,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1116,6 +1207,8 @@ PREHOOK: Input: default@bucket_big@ds=2008-04-09 PREHOOK: Input: default@bucket_small PREHOOK: Input: default@bucket_small@ds=2008-04-08 #### A masked pattern was here #### +FAILED: Execution Error, return code 3 from org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask +ATTEMPT: Execute BackupTask: org.apache.hadoop.hive.ql.exec.mr.MapRedTask POSTHOOK: query: select count(*) FROM bucket_big a JOIN bucket_small b ON a.key = b.key POSTHOOK: type: QUERY POSTHOOK: Input: default@bucket_big diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out index a161f1c..d18e84f 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out @@ -178,8 +178,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct 
bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -190,6 +192,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -198,11 +201,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -353,8 +360,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -365,6 +374,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -373,11 +383,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -505,8 +519,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -517,6 +533,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -525,11 +542,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -548,8 +569,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -560,6 +583,7 @@ 
STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -568,11 +592,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -650,8 +678,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -662,6 +692,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -670,11 +701,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -694,8 +729,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -706,6 +743,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -714,11 +752,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -738,8 +780,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -750,6 +794,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -758,11 +803,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -819,8 +868,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -831,6 +882,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -839,11 +891,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -921,8 +977,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -933,6 +991,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -941,11 +1000,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -965,8 +1028,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -977,6 +1042,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -985,11 +1051,15 @@ STAGE 
PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1009,8 +1079,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1021,6 +1093,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1029,11 +1102,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1121,8 +1198,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1133,6 +1212,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1141,11 +1221,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out index 3421ab1..bb63666 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out @@ -194,8 +194,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -206,6 +208,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -214,11 +217,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -369,8 +376,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -381,6 +390,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -389,11 +399,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -521,8 +535,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -533,6 +549,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -541,11 +558,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -564,8 +585,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -576,6 +599,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE 
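
(Illustrative aside, not part of the patch hunks around it.) Of the four basic stats, numFiles and totalSize can be recomputed cheaply from the partition directory itself, while numRows and rawDataSize require reading the data; that asymmetry is visible in these plans, where numFiles carries a real count while numRows and rawDataSize sit at 0. A minimal sketch of the filesystem-derived half, assuming a hypothetical helper built on the standard Hadoop FileSystem API:

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helper, not part of this patch: recomputes the two
// filesystem-derived stats (numFiles, totalSize) from a data directory.
public class QuickStatsSketch {

  static void fillQuickStats(Configuration conf, Path location,
      Map<String, String> params) throws IOException {
    FileSystem fs = location.getFileSystem(conf);
    long numFiles = 0;
    long totalSize = 0;
    for (FileStatus status : fs.listStatus(location)) {
      if (!status.isDirectory()) { // count only data files, not subdirs
        numFiles++;
        totalSize += status.getLen();
      }
    }
    params.put("numFiles", String.valueOf(numFiles));
    params.put("totalSize", String.valueOf(totalSize));
  }

  public static void main(String[] args) throws IOException {
    // Expects a directory path as the single argument.
    Map<String, String> params = new HashMap<>();
    fillQuickStats(new Configuration(), new Path(args[0]), params);
    System.out.println(params);
  }
}
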
bucket_count 4 bucket_field_name key @@ -584,11 +608,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -666,8 +694,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -678,6 +708,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -686,11 +717,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -710,8 +745,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -722,6 +759,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -730,11 +768,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -754,8 +796,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -766,6 +810,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -774,11 +819,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name 
default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -835,8 +884,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -847,6 +898,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -855,11 +907,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -937,8 +993,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -949,6 +1007,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -957,11 +1016,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -981,8 +1044,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -993,6 +1058,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1001,11 +1067,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct 
bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1025,8 +1095,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1037,6 +1109,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1045,11 +1118,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1137,8 +1214,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1149,6 +1228,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1157,11 +1237,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out index 9c2663c..0537c35 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out @@ -164,6 +164,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -183,6 +185,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -330,6 +334,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + 
rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -349,6 +355,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -531,6 +539,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -550,6 +560,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -587,6 +599,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -703,6 +717,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -722,6 +738,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -759,6 +777,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -848,6 +868,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -867,6 +889,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out index a25db01..d3b7be3 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out @@ -211,8 +211,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -223,6 +225,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -231,11 +234,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -256,8 +263,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -268,6 +277,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -276,11 +286,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -434,8 +448,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -446,6 +462,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -454,11 +471,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -479,8 +500,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -491,6 +514,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 
bucket_field_name key @@ -499,11 +523,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -634,8 +662,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -646,6 +676,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -654,11 +685,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -677,8 +712,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -689,6 +726,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -697,11 +735,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -779,8 +821,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -791,6 +835,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -799,11 +844,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big 
+ numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -824,8 +873,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -836,6 +887,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -844,11 +896,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -868,8 +924,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -880,6 +938,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -888,11 +947,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -912,8 +975,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -924,6 +989,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -932,11 +998,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -994,8 +1064,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1006,6 +1078,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1014,11 +1087,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1038,8 +1115,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1050,6 +1129,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1058,11 +1138,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1140,8 +1224,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1152,6 +1238,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1160,11 +1247,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was 
here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1185,8 +1276,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1197,6 +1290,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1205,11 +1299,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1229,8 +1327,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1241,6 +1341,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1249,11 +1350,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1273,8 +1378,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1285,6 +1392,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1293,11 +1401,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1385,8 +1497,10 @@ STAGE 
PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1397,6 +1511,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1405,11 +1520,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1430,8 +1549,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1442,6 +1563,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1450,11 +1572,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big diff --git a/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out index d22ba3b..be72acc 100644 --- a/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out +++ b/ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out @@ -211,8 +211,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -223,6 +225,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -231,11 +234,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + 
totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -256,8 +263,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -268,6 +277,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -276,11 +286,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -434,8 +448,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -446,6 +462,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -454,11 +471,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -479,8 +500,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -491,6 +514,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -499,11 +523,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -636,8 +664,10 @@ STAGE PLANS: 
#### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -648,6 +678,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -656,11 +687,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -679,8 +714,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -691,6 +728,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -699,11 +737,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -781,8 +823,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -793,6 +837,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -801,11 +846,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -826,8 +875,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + 
rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -838,6 +889,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -846,11 +898,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -870,8 +926,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -882,6 +940,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -890,11 +949,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -914,8 +977,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -926,6 +991,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -934,11 +1000,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -996,8 +1066,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1008,6 +1080,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1016,11 +1089,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1040,8 +1117,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1052,6 +1131,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1060,11 +1140,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1142,8 +1226,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1154,6 +1240,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1162,11 +1249,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1187,8 +1278,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1199,6 +1292,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1207,11 +1301,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1231,8 +1329,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1243,6 +1343,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1251,11 +1352,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1275,8 +1380,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1287,6 +1394,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1295,11 +1403,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1387,8 +1499,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1399,6 +1513,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1407,11 +1522,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1432,8 +1551,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1444,6 +1565,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1452,11 +1574,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big diff --git a/ql/src/test/results/clientpositive/binary_output_format.q.out b/ql/src/test/results/clientpositive/binary_output_format.q.out index 32b2feb..57225de 100644 --- a/ql/src/test/results/clientpositive/binary_output_format.q.out +++ b/ql/src/test/results/clientpositive/binary_output_format.q.out @@ -135,16 +135,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns mydata columns.comments columns.types string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -219,16 +224,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns mydata columns.comments columns.types string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -251,16 +261,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns mydata columns.comments columns.types string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -276,32 +291,42 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns mydata columns.comments columns.types string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns mydata columns.comments columns.types string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -323,16 +348,21 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns mydata columns.comments columns.types string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -348,32 +378,42 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns mydata columns.comments columns.types string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveBinaryOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns mydata columns.comments columns.types string #### A masked pattern was 
here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string mydata} serialization.format 1 serialization.last.column.takes.rest true serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/bucket1.q.out b/ql/src/test/results/clientpositive/bucket1.q.out index 0680176..32d0aca 100644 --- a/ql/src/test/results/clientpositive/bucket1.q.out +++ b/ql/src/test/results/clientpositive/bucket1.q.out @@ -126,6 +126,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 100 bucket_field_name key columns key,value @@ -133,9 +134,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket1_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket1_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket1_1 @@ -152,6 +157,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 100 bucket_field_name key columns key,value @@ -159,9 +165,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket1_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket1_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket1_1 diff --git a/ql/src/test/results/clientpositive/bucket2.q.out b/ql/src/test/results/clientpositive/bucket2.q.out index 958e556..26c8d16 100644 --- a/ql/src/test/results/clientpositive/bucket2.q.out +++ b/ql/src/test/results/clientpositive/bucket2.q.out @@ -126,6 +126,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -133,9 +134,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 @@ -152,6 +157,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -159,9 +165,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket2_1 { i32 key, string value} 
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket2_1
diff --git a/ql/src/test/results/clientpositive/bucket3.q.out b/ql/src/test/results/clientpositive/bucket3.q.out
index eca9ba5..033f49e 100644
--- a/ql/src/test/results/clientpositive/bucket3.q.out
+++ b/ql/src/test/results/clientpositive/bucket3.q.out
@@ -131,6 +131,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -138,11 +139,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.bucket3_1
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket3_1 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket3_1
@@ -161,6 +166,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -168,11 +174,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.bucket3_1
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket3_1 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket3_1
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
index 05fb434..012f014 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_1.q.out
@@ -182,6 +182,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.table1
       numFiles 1
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct table1 { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -201,6 +203,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.table1
       numFiles 1
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct table1 { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
index 1173c93..c613e2f 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_2.q.out
@@ -182,6 +182,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.table1
       numFiles 1
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct table1 { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -201,6 +203,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.table1
       numFiles 1
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct table1 { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
index 3b1912a..ac256a1 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark1.q.out
@@ -197,8 +197,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -209,6 +211,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -216,11 +219,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -284,15 +291,20 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count -1
       columns key,value1,value2
       columns.comments 
       columns.types string:string:string
 #### A masked pattern was here ####
       name default.bucketmapjoin_tmp_result
+      numFiles 0
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucketmapjoin_tmp_result
@@ -320,8 +332,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -332,6 +346,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -339,11 +354,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -364,8 +383,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -376,6 +397,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -383,11 +405,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part_2
@@ -404,15 +430,20 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count -1
       columns key,value1,value2
       columns.comments 
       columns.types string:string:string
 #### A masked pattern was here ####
       name default.bucketmapjoin_tmp_result
+      numFiles 0
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucketmapjoin_tmp_result
@@ -559,8 +590,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -571,6 +604,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -578,11 +612,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -687,8 +725,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -699,6 +739,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -706,11 +747,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -731,8 +776,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -743,6 +790,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -750,11 +798,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part_2
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
index 9a6aef7..bb5e332 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark2.q.out
@@ -181,8 +181,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -193,6 +195,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -200,11 +203,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part_2
@@ -268,15 +275,20 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count -1
       columns key,value1,value2
       columns.comments 
       columns.types string:string:string
 #### A masked pattern was here ####
       name default.bucketmapjoin_tmp_result
+      numFiles 0
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucketmapjoin_tmp_result
@@ -304,8 +316,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -316,6 +330,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -323,11 +338,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -348,8 +367,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -360,6 +381,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -367,11 +389,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part_2
@@ -388,15 +414,20 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count -1
       columns key,value1,value2
       columns.comments 
       columns.types string:string:string
 #### A masked pattern was here ####
       name default.bucketmapjoin_tmp_result
+      numFiles 0
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucketmapjoin_tmp_result
@@ -543,8 +574,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -555,6 +588,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -562,11 +596,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part_2
@@ -671,8 +709,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -683,6 +723,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -690,11 +731,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -715,8 +760,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -727,6 +774,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -734,11 +782,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part_2
diff --git a/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out b/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
index 1d2166f..2158bb3 100644
--- a/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
+++ b/ql/src/test/results/clientpositive/bucket_map_join_spark3.q.out
@@ -181,8 +181,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -193,6 +195,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -200,11 +203,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -268,15 +275,20 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count -1
       columns key,value1,value2
       columns.comments 
       columns.types string:string:string
 #### A masked pattern was here ####
       name default.bucketmapjoin_tmp_result
+      numFiles 0
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucketmapjoin_tmp_result
@@ -304,8 +316,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -316,6 +330,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -323,11 +338,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -348,8 +367,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -360,6 +381,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -367,11 +389,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part_2
@@ -388,15 +414,20 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count -1
       columns key,value1,value2
       columns.comments 
       columns.types string:string:string
 #### A masked pattern was here ####
       name default.bucketmapjoin_tmp_result
+      numFiles 0
+      numRows 0
+      rawDataSize 0
       serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucketmapjoin_tmp_result
@@ -543,8 +574,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -555,6 +588,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -562,11 +596,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -671,8 +709,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -683,6 +723,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 2
       bucket_field_name key
       columns key,value
@@ -690,11 +731,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part
@@ -715,8 +760,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -727,6 +774,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       bucket_count 4
       bucket_field_name key
       columns key,value
@@ -734,11 +782,15 @@ STAGE PLANS:
       columns.types int:string
 #### A masked pattern was here ####
       name default.srcbucket_mapjoin_part_2
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.srcbucket_mapjoin_part_2
diff --git a/ql/src/test/results/clientpositive/bucketcontext_1.q.out b/ql/src/test/results/clientpositive/bucketcontext_1.q.out
index 78a0c8c..f45e910 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_1.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_1.q.out
@@ -167,8 +167,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_small
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -179,6 +181,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 2
       bucket_field_name key
@@ -187,11 +190,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_small
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_small
@@ -272,8 +279,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -284,6 +293,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -292,11 +302,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
@@ -317,8 +331,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -329,6 +345,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -337,11 +354,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
@@ -496,8 +517,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -508,6 +531,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -516,11 +540,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
@@ -541,8 +569,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -553,6 +583,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -561,11 +592,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/bucketcontext_2.q.out b/ql/src/test/results/clientpositive/bucketcontext_2.q.out
index bc08fd2..2d052db 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_2.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_2.q.out
@@ -151,8 +151,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_small
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -163,6 +165,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -171,11 +174,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_small
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_small
@@ -256,8 +263,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -268,6 +277,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 2
       bucket_field_name key
@@ -276,11 +286,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
@@ -301,8 +315,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -313,6 +329,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 2
       bucket_field_name key
@@ -321,11 +338,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
@@ -480,8 +501,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -492,6 +515,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 2
       bucket_field_name key
@@ -500,11 +524,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
@@ -525,8 +553,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -537,6 +567,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 2
       bucket_field_name key
@@ -545,11 +576,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/bucketcontext_3.q.out b/ql/src/test/results/clientpositive/bucketcontext_3.q.out
index 0cba2ca..38ed880 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_3.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_3.q.out
@@ -151,8 +151,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_small
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -163,6 +165,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 2
       bucket_field_name key
@@ -171,11 +174,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_small
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_small
@@ -194,8 +201,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_small
       numFiles 2
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -206,6 +215,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 2
       bucket_field_name key
@@ -214,11 +224,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_small
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_small
@@ -299,8 +313,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -311,6 +327,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -319,11 +336,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
@@ -477,8 +498,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_big
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -489,6 +512,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -497,11 +521,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_big
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_big { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/bucketcontext_4.q.out b/ql/src/test/results/clientpositive/bucketcontext_4.q.out
index 652b377..64b4c74 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_4.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_4.q.out
@@ -167,8 +167,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_small
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -179,6 +181,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -187,11 +190,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_small
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
 #### A masked pattern was here ####
     serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
     name: default.bucket_small
@@ -210,8 +217,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
       name default.bucket_small
       numFiles 4
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -222,6 +231,7 @@ STAGE PLANS:
     input format: org.apache.hadoop.mapred.TextInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
     properties:
+      COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
       SORTBUCKETCOLSPREFIX TRUE
       bucket_count 4
       bucket_field_name key
@@ -230,11 +240,15 @@ STAGE PLANS:
       columns.types string:string
 #### A masked pattern was here ####
       name default.bucket_small
+      numFiles 0
+      numRows 0
       partition_columns ds
       partition_columns.types string
+      rawDataSize 0
       serialization.ddl struct bucket_small { string key, string value}
       serialization.format 1
       serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+      totalSize 0
#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -315,8 +329,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -327,6 +343,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -335,11 +352,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -493,8 +514,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -505,6 +528,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -513,11 +537,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big diff --git a/ql/src/test/results/clientpositive/bucketcontext_5.q.out b/ql/src/test/results/clientpositive/bucketcontext_5.q.out index ee4532a..e191a4d 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_5.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_5.q.out @@ -192,6 +192,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -211,6 +213,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -361,6 +365,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -380,6 +386,8 @@ 
STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe diff --git a/ql/src/test/results/clientpositive/bucketcontext_6.q.out b/ql/src/test/results/clientpositive/bucketcontext_6.q.out index e280b95..52b49e0 100644 --- a/ql/src/test/results/clientpositive/bucketcontext_6.q.out +++ b/ql/src/test/results/clientpositive/bucketcontext_6.q.out @@ -211,8 +211,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -223,6 +225,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -231,11 +234,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -256,8 +263,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -268,6 +277,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -276,11 +286,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -433,8 +447,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -445,6 +461,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -453,11 +470,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name 
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
@@ -478,8 +499,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 2
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -490,6 +513,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 2
 bucket_field_name key
@@ -498,11 +522,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/bucketcontext_7.q.out b/ql/src/test/results/clientpositive/bucketcontext_7.q.out
index b44ae95..294bab1 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_7.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_7.q.out
@@ -184,8 +184,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_small
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_small { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -196,6 +198,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 4
 bucket_field_name key
@@ -204,11 +207,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_small
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_small { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_small
@@ -227,8 +234,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_small
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_small { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -239,6 +248,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 4
 bucket_field_name key
@@ -247,11 +257,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_small
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_small { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_small
@@ -332,8 +346,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 2
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -344,6 +360,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 2
 bucket_field_name key
@@ -352,11 +369,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
@@ -377,8 +398,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 2
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -389,6 +412,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 2
 bucket_field_name key
@@ -397,11 +421,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
@@ -558,8 +586,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 2
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -570,6 +600,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 2
 bucket_field_name key
@@ -578,11 +609,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
@@ -603,8 +638,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 2
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -615,6 +652,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 2
 bucket_field_name key
@@ -623,11 +661,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/bucketcontext_8.q.out b/ql/src/test/results/clientpositive/bucketcontext_8.q.out
index eee7b57..2da9db9 100644
--- a/ql/src/test/results/clientpositive/bucketcontext_8.q.out
+++ b/ql/src/test/results/clientpositive/bucketcontext_8.q.out
@@ -184,8 +184,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_small
 numFiles 2
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_small { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -196,6 +198,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 2
 bucket_field_name key
@@ -204,11 +207,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_small
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_small { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_small
@@ -227,8 +234,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_small
 numFiles 2
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_small { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -239,6 +248,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 2
 bucket_field_name key
@@ -247,11 +257,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_small
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_small { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_small
@@ -332,8 +346,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -344,6 +360,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 4
 bucket_field_name key
@@ -352,11 +369,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
@@ -377,8 +398,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -389,6 +412,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 4
 bucket_field_name key
@@ -397,11 +421,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
@@ -558,8 +586,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -570,6 +600,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 4
 bucket_field_name key
@@ -578,11 +609,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
@@ -603,8 +638,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.bucket_big
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -615,6 +652,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 SORTBUCKETCOLSPREFIX TRUE
 bucket_count 4
 bucket_field_name key
@@ -623,11 +661,15 @@ STAGE PLANS:
 columns.types string:string
 #### A masked pattern was here ####
 name default.bucket_big
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct bucket_big { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin1.q.out b/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
index ac8a76f..9d9642e 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin1.q.out
@@ -554,8 +554,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -566,6 +568,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 4
 bucket_field_name key
 columns key,value
@@ -573,11 +576,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part
@@ -641,15 +648,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucketmapjoin_tmp_result
@@ -675,6 +687,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin
 numFiles 2
+numRows 0
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -693,6 +707,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin
 numFiles 2
+numRows 0
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -722,15 +738,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucketmapjoin_tmp_result
@@ -753,15 +774,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucketmapjoin_tmp_result
@@ -777,30 +803,40 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucketmapjoin_tmp_result
@@ -822,15 +858,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucketmapjoin_tmp_result
@@ -846,30 +887,40 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucketmapjoin_tmp_result
@@ -1171,8 +1222,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1183,6 +1236,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 4
 bucket_field_name key
 columns key,value
@@ -1190,11 +1244,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
index 041f2d0..75aa6cf 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin10.q.out
@@ -218,8 +218,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
 numFiles 3
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -230,6 +232,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 3
 bucket_field_name key
 columns key,value
@@ -237,11 +240,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -260,8 +267,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -272,6 +281,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 3
 bucket_field_name key
 columns key,value
@@ -279,11 +289,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -356,8 +370,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -368,6 +384,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 3
 bucket_field_name key
 columns key,value
@@ -375,11 +392,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -400,8 +421,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
 numFiles 3
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -412,6 +435,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 3
 bucket_field_name key
 columns key,value
@@ -419,11 +443,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
index 5e9fbe4..d53412d 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin11.q.out
@@ -228,8 +228,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
 numFiles 4
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -240,6 +242,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -247,11 +250,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -270,8 +277,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -282,6 +291,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -289,11 +299,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -374,8 +388,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -386,6 +402,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 4
 bucket_field_name key
 columns key,value
@@ -393,11 +410,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -418,8 +439,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
 numFiles 4
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -430,6 +453,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 4
 bucket_field_name key
 columns key,value
@@ -437,11 +461,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -606,8 +634,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
 numFiles 4
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -618,6 +648,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -625,11 +656,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -648,8 +683,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -660,6 +697,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -667,11 +705,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -752,8 +794,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -764,6 +808,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 4
 bucket_field_name key
 columns key,value
@@ -771,11 +816,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -796,8 +845,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
 numFiles 4
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -808,6 +859,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 4
 bucket_field_name key
 columns key,value
@@ -815,11 +867,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
index 3a38d7b..7c6d2d9 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin12.q.out
@@ -187,8 +187,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -199,17 +201,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -290,8 +297,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -302,6 +311,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -309,11 +319,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -466,8 +480,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_3
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -478,6 +494,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -485,11 +502,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_3
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_3
@@ -562,8 +583,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
 numFiles 2
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -574,6 +597,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -581,11 +605,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin13.q.out b/ql/src/test/results/clientpositive/bucketmapjoin13.q.out
index c78b7a1..6ccaed9 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin13.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin13.q.out
@@ -161,6 +161,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -168,11 +169,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -260,6 +265,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -267,11 +273,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -307,6 +317,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -314,11 +325,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -483,6 +498,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -490,11 +506,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -590,6 +610,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -597,11 +618,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -764,6 +789,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -771,11 +797,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -871,6 +901,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -878,11 +909,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
@@ -1047,6 +1082,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -1054,11 +1090,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -1154,6 +1194,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name value
 columns key,value
@@ -1161,11 +1202,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_1
+numFiles 0
+numRows 0
 partition_columns part
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
index d9c4463..44f74d4 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin2.q.out
@@ -190,8 +190,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
 numFiles 2
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -202,6 +204,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 2
 bucket_field_name key
 columns key,value
@@ -209,11 +212,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part_2
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part_2
@@ -277,15 +284,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucketmapjoin_tmp_result
@@ -313,8 +325,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part
 numFiles 4
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -325,6 +339,7 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count 4
 bucket_field_name key
 columns key,value
@@ -332,11 +347,15 @@ STAGE PLANS:
 columns.types int:string
 #### A masked pattern was here ####
 name default.srcbucket_mapjoin_part
+numFiles 0
+numRows 0
 partition_columns ds
 partition_columns.types string
+rawDataSize 0
 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcbucket_mapjoin_part
@@ -362,15 +381,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value1,value2
 columns.comments 
 columns.types string:string:string
 #### A masked pattern was here ####
 name default.bucketmapjoin_tmp_result
+numFiles 0
+numRows 0
+rawDataSize 0
 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.bucketmapjoin_tmp_result
@@ -393,15 +417,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
{"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -417,30 +446,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -462,15 +501,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -486,30 +530,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: 
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -730,8 +784,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
     numFiles 4
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -742,6 +798,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 4
     bucket_field_name key
     columns key,value
@@ -749,11 +806,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part
@@ -858,8 +919,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -870,6 +933,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -877,11 +941,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -1322,8 +1390,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1334,6 +1404,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -1341,11 +1412,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -1364,8 +1439,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1376,6 +1453,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -1383,11 +1461,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -1492,8 +1574,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
     numFiles 4
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1504,6 +1588,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 4
     bucket_field_name key
     columns key,value
@@ -1511,11 +1596,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
index 77b19ba..bd46a9d 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin3.q.out
@@ -221,8 +221,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
     numFiles 4
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -233,6 +235,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 4
     bucket_field_name key
     columns key,value
@@ -240,11 +243,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part
@@ -308,15 +315,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -344,8 +356,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -356,6 +370,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -363,11 +378,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -393,15 +412,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -424,15 +448,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -448,30 +477,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -493,15 +532,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -517,30 +561,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -768,8 +822,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -780,6 +836,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -787,11 +844,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -896,8 +957,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
     numFiles 4
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -908,6 +971,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 4
     bucket_field_name key
     columns key,value
@@ -915,11 +979,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin4.q.out b/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
index 5858160..69e48e4 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin4.q.out
@@ -251,15 +251,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -285,6 +290,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin
     numFiles 2
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -303,6 +310,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin
     numFiles 2
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -332,15 +341,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -363,15 +377,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -387,30 +406,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -432,15 +461,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -456,30 +490,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -764,6 +808,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin
     numFiles 2
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -782,6 +828,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin
     numFiles 2
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
index 728c0b7..5261047 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin5.q.out
@@ -301,15 +301,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -337,8 +342,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
     numFiles 4
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -349,6 +356,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 4
     bucket_field_name key
     columns key,value
@@ -356,11 +364,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part
@@ -381,8 +393,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
     numFiles 4
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -393,6 +407,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 4
     bucket_field_name key
     columns key,value
@@ -400,11 +415,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part
@@ -431,15 +450,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -462,15 +486,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -486,30 +515,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -531,15 +570,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -555,30 +599,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -877,8 +931,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -889,6 +945,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -896,11 +953,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -921,8 +982,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -933,6 +996,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -940,11 +1004,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
index 804c882..4e26d1d 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin8.q.out
@@ -152,8 +152,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -164,6 +166,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 3
     bucket_field_name key
     columns key,value
@@ -171,11 +174,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -256,8 +263,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_1
     numFiles 2
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -268,6 +277,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -275,11 +285,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_1
+    numFiles 0
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_1
@@ -441,8 +455,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -453,6 +469,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name value
     columns key,value
@@ -460,11 +477,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -545,8 +566,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_1
     numFiles 2
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -557,6 +580,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -564,11 +588,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_1
+    numFiles 0
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
index cc0672e..b63fb47 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin9.q.out
@@ -160,8 +160,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 3
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -172,6 +174,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -179,11 +182,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -256,8 +263,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_1
     numFiles 2
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -268,6 +277,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -275,11 +285,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_1
+    numFiles 0
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_1
@@ -474,8 +488,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -486,6 +502,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -493,11 +510,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -570,8 +591,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_1
     numFiles 2
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -582,6 +605,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -589,11 +613,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_1
+    numFiles 0
+    numRows 0
     partition_columns part
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
index 36b1cfe..0aa9cf4 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out
@@ -165,8 +165,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
     numFiles 3
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -177,6 +179,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 3
     bucket_field_name key
     columns key,value
@@ -184,11 +187,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part
@@ -244,15 +251,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -278,6 +290,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin
     numFiles 2
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -296,6 +310,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin
     numFiles 2
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -325,15 +341,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -356,15 +377,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -380,30 +406,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -425,15 +461,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -449,30 +490,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
index 21138d6..c310391 100644
--- a/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
+++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out
@@ -167,8 +167,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -179,6 +181,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -186,11 +189,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -209,8 +216,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
     numFiles 2
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -221,6 +230,7 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count 2
     bucket_field_name key
     columns key,value
@@ -228,11 +238,15 @@ STAGE PLANS:
     columns.types int:string
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin_part_2
+    numFiles 0
+    numRows 0
     partition_columns ds
     partition_columns.types string
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.srcbucket_mapjoin_part_2
@@ -296,15 +310,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -330,6 +349,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin
     numFiles 2
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -348,6 +369,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
     name default.srcbucket_mapjoin
     numFiles 2
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct srcbucket_mapjoin { i32 key, string value}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -377,15 +400,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -408,15 +436,20 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
     serialization.format 1
     serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+    totalSize 0
 #### A masked pattern was here ####
   serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
   name: default.bucketmapjoin_tmp_result
@@ -432,30 +465,40 @@ STAGE PLANS:
   input format: org.apache.hadoop.mapred.TextInputFormat
   output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
   properties:
+    COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
     bucket_count -1
     columns key,value1,value2
     columns.comments
     columns.types string:string:string
 #### A masked pattern was here ####
     name default.bucketmapjoin_tmp_result
+    numFiles 0
+    numRows 0
+    rawDataSize 0
     serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2}
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -477,15 +520,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -501,30 +549,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result diff --git a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out index c95681a..82d4b6a 100644 --- a/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out +++ b/ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out @@ -296,6 +296,8 @@ STAGE PLANS: #### A masked pattern was here #### name 
default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -315,6 +317,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -484,6 +488,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -503,6 +509,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -661,6 +669,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -680,6 +690,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -841,6 +853,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -860,6 +874,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1021,6 +1037,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1040,6 +1058,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1201,6 +1221,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1220,6 +1242,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1381,6 +1405,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ 
-1400,6 +1426,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1561,6 +1589,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1580,6 +1610,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1741,6 +1773,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test3 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1760,6 +1794,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test3 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe diff --git a/ql/src/test/results/clientpositive/cbo_SortUnionTransposeRule.q.out b/ql/src/test/results/clientpositive/cbo_SortUnionTransposeRule.q.out index 7542882..cfff383 100644 --- a/ql/src/test/results/clientpositive/cbo_SortUnionTransposeRule.q.out +++ b/ql/src/test/results/clientpositive/cbo_SortUnionTransposeRule.q.out @@ -32,38 +32,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 
- Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -119,38 +119,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -186,43 +186,43 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key 
expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -479,38 +479,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -558,9 +558,7 @@ limit 5 POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage - Stage-2 depends on stages: Stage-1, Stage-3 - Stage-3 is a root stage - Stage-0 depends on stages: Stage-2 + Stage-0 depends on stages: Stage-1 STAGE PLANS: Stage: Stage-1 @@ -568,96 +566,42 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - sort order: - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - TopN Hash Memory Usage: 0.1 - value expressions: _col0 (type: string) - Reduce Operator Tree: - Select Operator - expressions: VALUE._col0 (type: string) - outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - Stage: Stage-2 - Map Reduce - Map Operator Tree: - TableScan - Union - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - TableScan - Union - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - Stage: Stage-3 - Map Reduce - Map Operator Tree: + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 2 Data size: 228 
Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - sort order: - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - TopN Hash Memory Usage: 0.1 - value expressions: _col0 (type: string) - Reduce Operator Tree: - Select Operator - expressions: VALUE._col0 (type: string) - outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + Limit + Number of rows: 5 + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -681,9 +625,7 @@ limit 5 POSTHOOK: type: QUERY STAGE DEPENDENCIES: Stage-1 is a root stage - Stage-2 depends on stages: Stage-1, Stage-3 - Stage-3 is a root stage - Stage-0 depends on stages: Stage-2 + Stage-0 depends on stages: Stage-1 STAGE PLANS: Stage: Stage-1 @@ -691,95 +633,47 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE - TopN Hash Memory Usage: 0.1 - Reduce Operator Tree: - Select Operator - expressions: KEY.reducesinkkey0 (type: string) - outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - table: - 
input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe - - Stage: Stage-2 - Map Reduce - Map Operator Tree: - TableScan - Union - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE - TopN Hash Memory Usage: 0.1 - TableScan - Union - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE - TopN Hash Memory Usage: 0.1 - Reduce Operator Tree: - Select Operator - expressions: KEY.reducesinkkey0 (type: string) - outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE - Limit - Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - File Output Operator - compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - - Stage: Stage-3 - Map Reduce - Map Operator Tree: + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: _col0 (type: string) - sort order: + - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE - TopN Hash Memory Usage: 0.1 + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Union + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: _col0 (type: string) + sort order: + + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE + TopN Hash Memory Usage: 0.1 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: 
org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Stage: Stage-0 Fetch Operator @@ -1088,38 +982,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1153,38 +1047,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: 
input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan alias: a - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 50 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out b/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out index 89f1746..5495147 100644 --- a/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out +++ b/ql/src/test/results/clientpositive/cbo_rp_cross_product_check_2.q.out @@ -53,11 +53,11 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: key, value - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -68,11 +68,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: key, value - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -80,10 +80,10 @@ STAGE PLANS: 0 1 outputColumnNames: key, value, key0, value0 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -121,11 +121,11 @@ STAGE PLANS: a TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: 
NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: key, value - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -133,14 +133,14 @@ STAGE PLANS: d1 TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: key, value - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: string) @@ -151,14 +151,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d2 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: key, value - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -166,7 +166,7 @@ STAGE PLANS: 0 key (type: string) 1 key (type: string) outputColumnNames: key, value, key0, value0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -174,10 +174,10 @@ STAGE PLANS: 0 1 outputColumnNames: key, value, key0, value0, key1, value1 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -220,14 +220,14 @@ STAGE PLANS: od1:d1 TableScan alias: od1:d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: string) @@ -238,14 +238,14 @@ 
STAGE PLANS: Map Operator Tree: TableScan alias: od1:d2 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -253,21 +253,21 @@ STAGE PLANS: 0 key (type: string) 1 key (type: string) outputColumnNames: key - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: key (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -275,7 +275,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: key - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -293,11 +293,11 @@ STAGE PLANS: a TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: key, value - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -314,10 +314,10 @@ STAGE PLANS: 0 1 outputColumnNames: key, value, key0 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -355,11 +355,11 @@ STAGE PLANS: od1:d1 TableScan alias: od1:d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: 
key - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -370,9 +370,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: od1:d2 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Map Join Operator condition map: Inner Join 0 to 1 @@ -380,21 +380,21 @@ STAGE PLANS: 0 1 outputColumnNames: key - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: key (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -402,7 +402,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: key - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -420,11 +420,11 @@ STAGE PLANS: a TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: key, value - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -441,10 +441,10 @@ STAGE PLANS: 0 1 outputColumnNames: key, value, key0 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -487,27 +487,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: ss:a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: key (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: key - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -543,10 +543,10 @@ STAGE PLANS: 0 1 outputColumnNames: key, key0 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -579,10 +579,10 @@ STAGE PLANS: 0 1 outputColumnNames: key, key0 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -596,12 +596,12 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: string) TableScan Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: string) Reduce Operator Tree: Join Operator @@ -611,10 +611,10 @@ STAGE PLANS: 0 1 outputColumnNames: key, key0 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -630,14 +630,14 @@ STAGE PLANS: od1:d1 TableScan alias: od1:d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter 
Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: string) @@ -648,14 +648,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: od1:d2 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -663,21 +663,21 @@ STAGE PLANS: 0 key (type: string) 1 key (type: string) outputColumnNames: key - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: key (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -685,7 +685,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: key - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: diff --git a/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.java1.7.out b/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.java1.7.out index f680a02..a649ab8 100644 --- a/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.java1.7.out +++ b/ql/src/test/results/clientpositive/cbo_rp_outer_join_ppr.q.java1.7.out @@ -226,17 +226,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -272,17 +277,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -318,17 +328,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -364,17 +379,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -709,17 +729,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -755,17 +780,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value 
columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out index 8c9664d..d812193 100644 --- a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out +++ b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_1.q.out @@ -909,6 +909,8 @@ Retention: 0 Table Type: MANAGED_TABLE Table Parameters: numFiles 1 + numRows 0 + rawDataSize 0 totalSize 5812 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out b/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out index 46c285e..18314b5 100644 --- a/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out +++ b/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out @@ -25,6 +25,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/columnstats_partlvl.q.out b/ql/src/test/results/clientpositive/columnstats_partlvl.q.out index bfdc3b6..00bc7d0 100644 --- a/ql/src/test/results/clientpositive/columnstats_partlvl.q.out +++ b/ql/src/test/results/clientpositive/columnstats_partlvl.q.out @@ -149,8 +149,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.employee_part numFiles 1 + numRows 0 partition_columns employeesalary partition_columns.types double + rawDataSize 0 serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -161,6 +163,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns employeeid,employeename columns.comments @@ -168,11 +171,15 @@ STAGE PLANS: field.delim | #### A masked pattern was here #### name default.employee_part + numFiles 0 + numRows 0 partition_columns employeesalary partition_columns.types double + rawDataSize 0 serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.employee_part @@ -348,8 +355,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.employee_part numFiles 1 + numRows 0 partition_columns employeesalary partition_columns.types double + rawDataSize 0 serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -360,6 +369,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns employeeid,employeename columns.comments @@ -367,11 +377,15 @@ STAGE PLANS: field.delim | #### A masked pattern was here #### name default.employee_part + numFiles 0 + numRows 0 partition_columns employeesalary partition_columns.types double + rawDataSize 0 serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.employee_part diff --git a/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out b/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out index 96cf8fd..fd66028 100644 --- a/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out +++ b/ql/src/test/results/clientpositive/columnstats_tbllvl.q.out @@ -143,6 +143,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.uservisits_web_text_none numFiles 1 + numRows 0 + rawDataSize 0 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -161,6 +163,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.uservisits_web_text_none numFiles 1 + numRows 0 + rawDataSize 0 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -545,6 +549,8 @@ STAGE PLANS: #### A masked pattern was here #### name dummydb.uservisits_in_dummy_db numFiles 1 + numRows 0 + rawDataSize 0 serialization.ddl struct uservisits_in_dummy_db { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -563,6 +569,8 @@ STAGE PLANS: #### A masked pattern was here #### name dummydb.uservisits_in_dummy_db numFiles 1 + numRows 0 + rawDataSize 0 serialization.ddl struct uservisits_in_dummy_db { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe diff --git a/ql/src/test/results/clientpositive/combine2.q.out b/ql/src/test/results/clientpositive/combine2.q.out index 8c59816..c67ec4f 100644 --- a/ql/src/test/results/clientpositive/combine2.q.out +++ b/ql/src/test/results/clientpositive/combine2.q.out @@ -234,17 +234,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key columns.comments columns.types string #### A masked pattern was here #### name default.combine2 + numFiles 0 + numRows 0 
partition_columns value partition_columns.types string + rawDataSize 0 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -279,17 +284,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key columns.comments columns.types string #### A masked pattern was here #### name default.combine2 + numFiles 0 + numRows 0 partition_columns value partition_columns.types string + rawDataSize 0 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -324,17 +334,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key columns.comments columns.types string #### A masked pattern was here #### name default.combine2 + numFiles 0 + numRows 0 partition_columns value partition_columns.types string + rawDataSize 0 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -369,17 +384,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key columns.comments columns.types string #### A masked pattern was here #### name default.combine2 + numFiles 0 + numRows 0 partition_columns value partition_columns.types string + rawDataSize 0 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -414,17 +434,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key columns.comments columns.types string #### A masked pattern was here #### name default.combine2 + numFiles 0 + numRows 0 partition_columns value partition_columns.types string + rawDataSize 0 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -459,17 +484,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key columns.comments columns.types string #### A masked pattern was here #### name default.combine2 + 
numFiles 0 + numRows 0 partition_columns value partition_columns.types string + rawDataSize 0 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -504,17 +534,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key columns.comments columns.types string #### A masked pattern was here #### name default.combine2 + numFiles 0 + numRows 0 partition_columns value partition_columns.types string + rawDataSize 0 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -549,17 +584,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key columns.comments columns.types string #### A masked pattern was here #### name default.combine2 + numFiles 0 + numRows 0 partition_columns value partition_columns.types string + rawDataSize 0 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 diff --git a/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out b/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out index c4f51a3..f68bcb1 100644 --- a/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out +++ b/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out @@ -35,6 +35,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -81,6 +86,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -127,6 +134,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -180,6 +189,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -226,6 +237,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -271,6 +284,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/create_like.q.out b/ql/src/test/results/clientpositive/create_like.q.out index 9241b68..8666e02 100644 --- 
a/ql/src/test/results/clientpositive/create_like.q.out +++ b/ql/src/test/results/clientpositive/create_like.q.out @@ -24,6 +24,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -344,6 +349,8 @@ Table Parameters: k2 v2 #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### @@ -470,6 +477,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/create_like_view.q.out b/ql/src/test/results/clientpositive/create_like_view.q.out index e2dc2c4..45fa4ef 100644 --- a/ql/src/test/results/clientpositive/create_like_view.q.out +++ b/ql/src/test/results/clientpositive/create_like_view.q.out @@ -52,6 +52,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/create_skewed_table1.q.out b/ql/src/test/results/clientpositive/create_skewed_table1.q.out index 415bb77..fe5ea0f 100644 --- a/ql/src/test/results/clientpositive/create_skewed_table1.q.out +++ b/ql/src/test/results/clientpositive/create_skewed_table1.q.out @@ -40,6 +40,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -72,6 +77,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -105,6 +115,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/cross_product_check_1.q.out b/ql/src/test/results/clientpositive/cross_product_check_1.q.out index 4feb798..8e5844c 100644 --- a/ql/src/test/results/clientpositive/cross_product_check_1.q.out +++ b/ql/src/test/results/clientpositive/cross_product_check_1.q.out @@ -47,25 +47,25 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: 
NONE value expressions: _col0 (type: string), _col1 (type: string) TableScan alias: b - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Reduce Operator Tree: Join Operator @@ -75,10 +75,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -106,35 +106,35 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -144,7 +144,7 @@ STAGE 
PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -158,18 +158,18 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Reduce Operator Tree: Join Operator @@ -179,10 +179,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -217,34 +217,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 
104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -253,12 +253,12 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -274,13 +274,13 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -293,19 +293,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) TableScan Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reduce Operator Tree: Join Operator @@ -315,10 +315,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -348,23 +348,23 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: 
NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Operator Tree: Join Operator condition map: @@ -373,12 +373,12 @@ STAGE PLANS: 0 1 outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -394,13 +394,13 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -413,19 +413,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) TableScan Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reduce Operator Tree: Join Operator @@ -435,10 +435,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: 
COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -472,27 +472,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: key (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -506,12 +506,12 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) TableScan Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reduce Operator Tree: Join Operator @@ -521,10 +521,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -535,34 +535,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 
114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -571,12 +571,12 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -592,13 +592,13 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: diff --git a/ql/src/test/results/clientpositive/cross_product_check_2.q.out b/ql/src/test/results/clientpositive/cross_product_check_2.q.out index f34f2b5..8a5ae8d 100644 --- a/ql/src/test/results/clientpositive/cross_product_check_2.q.out +++ b/ql/src/test/results/clientpositive/cross_product_check_2.q.out @@ -53,11 +53,11 @@ STAGE PLANS: $hdt$_1:b TableScan alias: b - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -68,11 +68,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic 
stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -80,10 +80,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -121,14 +121,14 @@ STAGE PLANS: $hdt$_0:d1 TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: string) @@ -136,11 +136,11 @@ STAGE PLANS: $hdt$_2:a TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -151,14 +151,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -166,7 +166,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -174,10 +174,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -220,14 +220,14 @@ STAGE PLANS: $hdt$_1:$hdt$_1:d1 TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: string) @@ -238,14 +238,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -253,17 +253,17 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -271,7 +271,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -289,11 +289,11 @@ STAGE PLANS: $hdt$_0:a TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE 
+ Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -310,10 +310,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -351,11 +351,11 @@ STAGE PLANS: $hdt$_1:$hdt$_1:d1 TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -366,9 +366,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Map Join Operator condition map: Inner Join 0 to 1 @@ -376,17 +376,17 @@ STAGE PLANS: 0 1 outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -394,7 +394,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -412,11 +412,11 @@ STAGE PLANS: $hdt$_0:a TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 @@ -433,10 +433,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: 
Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -479,27 +479,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: key (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -535,10 +535,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -571,10 +571,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -588,12 +588,12 @@ STAGE PLANS: TableScan Reduce Output Operator sort order: - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) TableScan Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reduce Operator Tree: Join Operator @@ -603,10 +603,10 @@ 
STAGE PLANS: 0 1 outputColumnNames: _col0, _col1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -622,14 +622,14 @@ STAGE PLANS: $hdt$_1:$hdt$_1:d1 TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: string) @@ -640,14 +640,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -655,17 +655,17 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: @@ -673,7 +673,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: diff --git a/ql/src/test/results/clientpositive/ctas.q.out b/ql/src/test/results/clientpositive/ctas.q.out index c9676de..f4286df 100644 --- a/ql/src/test/results/clientpositive/ctas.q.out +++ b/ql/src/test/results/clientpositive/ctas.q.out @@ -154,10 +154,7 @@ Retention: 0 #### A masked pattern was 
here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### @@ -305,10 +302,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### @@ -456,10 +450,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 120 totalSize 199 #### A masked pattern was here #### @@ -520,10 +511,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 120 totalSize 199 #### A masked pattern was here #### @@ -672,10 +660,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/ctas_colname.q.out b/ql/src/test/results/clientpositive/ctas_colname.q.out index 2622676..e1a864a 100644 --- a/ql/src/test/results/clientpositive/ctas_colname.q.out +++ b/ql/src/test/results/clientpositive/ctas_colname.q.out @@ -112,10 +112,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 20 - rawDataSize 620 totalSize 640 #### A masked pattern was here #### @@ -272,10 +269,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 25 - rawDataSize 242 totalSize 267 #### A masked pattern was here #### @@ -463,10 +457,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 20 - rawDataSize 268 totalSize 288 #### A masked pattern was here #### @@ -636,10 +627,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 25 - rawDataSize 309 totalSize 334 #### A masked pattern was here #### @@ -787,10 +775,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 309 - rawDataSize 3891 totalSize 4200 #### A masked pattern was here #### @@ -1221,10 +1206,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 5 - rawDataSize 45 totalSize 50 #### A masked pattern was here #### @@ -1356,10 +1338,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 5 - rawDataSize 35 totalSize 40 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out b/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out index 39d8bcf..6ebab83 100644 --- a/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out 
+++ b/ql/src/test/results/clientpositive/ctas_uses_database_location.q.out @@ -144,10 +144,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 500 - rawDataSize 5312 totalSize 5812 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/database_location.q.out b/ql/src/test/results/clientpositive/database_location.q.out index 797177d..926db3a 100644 --- a/ql/src/test/results/clientpositive/database_location.q.out +++ b/ql/src/test/results/clientpositive/database_location.q.out @@ -43,6 +43,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -113,6 +118,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/decimal_join2.q.out b/ql/src/test/results/clientpositive/decimal_join2.q.out index 3a0b327..5a52926 100644 --- a/ql/src/test/results/clientpositive/decimal_join2.q.out +++ b/ql/src/test/results/clientpositive/decimal_join2.q.out @@ -57,35 +57,35 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: decimal(38,18)), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(38,18)) sort order: + Map-reduce partition columns: _col0 (type: decimal(38,18)) - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int) TableScan alias: a - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: decimal(38,18)), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(38,18)) sort order: + Map-reduce partition columns: _col0 (type: decimal(38,18)) - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 
(type: int) Reduce Operator Tree: Join Operator @@ -95,7 +95,7 @@ STAGE PLANS: 0 _col0 (type: decimal(38,18)) 1 _col0 (type: decimal(38,18)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 41 Data size: 4725 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 620 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -110,15 +110,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: decimal(38,18)), _col1 (type: int), _col2 (type: decimal(38,18)), _col3 (type: int) sort order: ++++ - Statistics: Num rows: 41 Data size: 4725 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 620 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: decimal(38,18)), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: decimal(38,18)), KEY.reducesinkkey3 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 41 Data size: 4725 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 620 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 41 Data size: 4725 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 620 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -225,14 +225,14 @@ STAGE PLANS: $hdt$_0:a TableScan alias: a - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: decimal(38,18)), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: decimal(38,18)) @@ -243,14 +243,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: decimal(38,18)), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 38 Data size: 4296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 564 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -258,21 +258,21 @@ STAGE PLANS: 0 _col0 (type: decimal(38,18)) 1 _col0 (type: decimal(38,18)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 41 Data size: 4725 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 620 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(38,18)), _col1 (type: int), _col2 (type: decimal(38,18)), _col3 (type: int) sort order: ++++ 
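The shrinking plan statistics in this decimal_join2.q.out hunk (Num rows: 38 -> 4 on the scans, 41 -> 4 after the join) follow from the CTAS table no longer carrying numRows/rawDataSize: once only totalSize survives in the table parameters, the planner has to re-derive a row count from on-disk size and a schema-based average row width. A minimal sketch of that fallback, under assumed per-type widths (the 100-byte guess for variable-length columns appears to match the hive.stats.max.variable.length default; Hive's real constants live in its stats utilities and differ by type):

    // Illustrative sketch only -- not Hive's implementation.
    public class RowCountFallback {
      // Assumed per-column width guesses in bytes.
      static long guessWidth(String type) {
        if (type.equals("int")) return 4;
        if (type.equals("bigint")) return 8;
        return 100; // string, decimal, ...: coarse assumption
      }

      static long estimateNumRows(long totalSize, String... columnTypes) {
        long rowWidth = 0;
        for (String t : columnTypes) {
          rowWidth += guessWidth(t);
        }
        // Never report fewer than one row, even for empty files
        // (cf. the empty_join.q.out hunks below, which still plan 1 row).
        return Math.max(1, totalSize / Math.max(1, rowWidth));
      }

      public static void main(String[] args) {
        // explain_ddl.q.out below: totalSize 5812 over two string columns
        // gives 5812 / 200 = 29, matching "Num rows: 29 Data size: 5812".
        System.out.println(estimateNumRows(5812, "string", "string"));
      }
    }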
- Statistics: Num rows: 41 Data size: 4725 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 620 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: decimal(38,18)), KEY.reducesinkkey1 (type: int), KEY.reducesinkkey2 (type: decimal(38,18)), KEY.reducesinkkey3 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 41 Data size: 4725 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 620 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 41 Data size: 4725 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 620 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/decimal_serde.q.out b/ql/src/test/results/clientpositive/decimal_serde.q.out index e0f5c74..e466554 100644 --- a/ql/src/test/results/clientpositive/decimal_serde.q.out +++ b/ql/src/test/results/clientpositive/decimal_serde.q.out @@ -116,10 +116,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 38 - rawDataSize 157 totalSize 278 #### A masked pattern was here #### @@ -169,10 +166,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 38 - rawDataSize 157 totalSize 278 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/default_file_format.q.out b/ql/src/test/results/clientpositive/default_file_format.q.out index 3d5c20f..4e5f27d 100644 --- a/ql/src/test/results/clientpositive/default_file_format.q.out +++ b/ql/src/test/results/clientpositive/default_file_format.q.out @@ -59,6 +59,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -88,6 +93,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -117,6 +127,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/describe_comment_indent.q.out b/ql/src/test/results/clientpositive/describe_comment_indent.q.out index 3e0f45e..5a01de1 100644 --- a/ql/src/test/results/clientpositive/describe_comment_indent.q.out +++ b/ql/src/test/results/clientpositive/describe_comment_indent.q.out @@ -60,7 +60,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} comment table comment\ntwo lines + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git 
a/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out b/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out index df0b65e..703fa14 100644 --- a/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out +++ b/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out @@ -55,6 +55,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 0 + numRows 0 + rawDataSize 0 totalSize 0 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/describe_syntax.q.out b/ql/src/test/results/clientpositive/describe_syntax.q.out index d0c75ed..eb6b51a 100644 --- a/ql/src/test/results/clientpositive/describe_syntax.q.out +++ b/ql/src/test/results/clientpositive/describe_syntax.q.out @@ -99,6 +99,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -171,6 +176,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/describe_table.q.out b/ql/src/test/results/clientpositive/describe_table.q.out index 19664b0..009c75e 100644 --- a/ql/src/test/results/clientpositive/describe_table.q.out +++ b/ql/src/test/results/clientpositive/describe_table.q.out @@ -186,6 +186,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -271,6 +276,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out b/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out index b79095c..acf0cbc 100644 --- a/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out +++ b/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out @@ -159,6 +159,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.uservisits_web_text_none numFiles 1 + numRows 0 + rawDataSize 0 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -177,6 +179,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.uservisits_web_text_none numFiles 1 + numRows 0 + rawDataSize 0 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe diff --git a/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out b/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out index e0168b3..5f9a4d7 
100644 --- a/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out +++ b/ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out @@ -110,17 +110,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns intcol columns.comments columns.types string #### A masked pattern was here #### name default.dynamic_part_table + numFiles 0 + numRows 0 partition_columns partcol1/partcol2 partition_columns.types string:string + rawDataSize 0 serialization.ddl struct dynamic_part_table { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dynamic_part_table @@ -204,17 +209,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns intcol columns.comments columns.types string #### A masked pattern was here #### name default.dynamic_part_table + numFiles 0 + numRows 0 partition_columns partcol1/partcol2 partition_columns.types string:string + rawDataSize 0 serialization.ddl struct dynamic_part_table { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dynamic_part_table @@ -308,17 +318,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns intcol columns.comments columns.types string #### A masked pattern was here #### name default.dynamic_part_table + numFiles 0 + numRows 0 partition_columns partcol1/partcol2 partition_columns.types string:string + rawDataSize 0 serialization.ddl struct dynamic_part_table { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dynamic_part_table @@ -352,17 +367,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns intcol columns.comments columns.types string #### A masked pattern was here #### name default.dynamic_part_table + numFiles 0 + numRows 0 partition_columns partcol1/partcol2 partition_columns.types string:string + rawDataSize 0 serialization.ddl struct dynamic_part_table { string intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dynamic_part_table diff --git a/ql/src/test/results/clientpositive/empty_join.q.out b/ql/src/test/results/clientpositive/empty_join.q.out index 9a47c1f..2201298 100644 --- a/ql/src/test/results/clientpositive/empty_join.q.out +++ b/ql/src/test/results/clientpositive/empty_join.q.out @@ -87,14 +87,14 @@ STAGE PLANS: $hdt$_2:t3 
TableScan alias: t3 - Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: id is not null (type: boolean) - Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: id (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: int) @@ -106,14 +106,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: id is not null (type: boolean) - Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: id (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 2 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -123,10 +123,10 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 2 Data size: 2 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 2 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/exim_hidden_files.q.out b/ql/src/test/results/clientpositive/exim_hidden_files.q.out index 9b04a6d..762535a 100644 --- a/ql/src/test/results/clientpositive/exim_hidden_files.q.out +++ b/ql/src/test/results/clientpositive/exim_hidden_files.q.out @@ -75,6 +75,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/explain_ddl.q.out b/ql/src/test/results/clientpositive/explain_ddl.q.out index fa73d99..451b893 100644 --- a/ql/src/test/results/clientpositive/explain_ddl.q.out +++ b/ql/src/test/results/clientpositive/explain_ddl.q.out @@ -159,14 +159,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: m1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic 
stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -343,14 +343,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: m1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -507,14 +507,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: m1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out index a30c356..868f077 100644 --- a/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out +++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_full.q.out @@ -143,17 +143,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -186,17 +191,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -276,17 +286,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -319,17 +334,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -476,17 +496,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -520,17 +545,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -564,17 +594,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + 
rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -608,17 +643,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -695,17 +735,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -739,17 +784,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -783,17 +833,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -827,17 +882,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name 
default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out index 4e589b8..359e4f6 100644 --- a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out +++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial.q.out @@ -160,17 +160,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -203,17 +208,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -246,17 +256,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -289,17 +304,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -379,17 +399,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -422,17 +447,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -465,17 +495,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -508,17 +543,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -611,17 +651,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year 
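Every partition descriptor in these extrapolate_part_stats hunks now carries COLUMN_STATS_ACCURATE as a small JSON object, {"BASIC_STATS":"true"}, rather than a bare TRUE/FALSE; the DESCRIBE FORMATTED output in the describe_*.q.out hunks earlier prints the same value with escaped quotes. Anything that wants to know whether the basic stats can be trusted therefore has to parse the value. A sketch of such a check using org.json (a hypothetical helper, not a Hive API; the unescape step is only needed for values copied out of DESCRIBE output):

    import org.json.JSONException;
    import org.json.JSONObject;

    public class BasicStatsFlag {
      static boolean basicStatsAccurate(String columnStatsAccurate) {
        if (columnStatsAccurate == null) {
          return false; // parameter absent: stats unknown, not trustable
        }
        String raw = columnStatsAccurate.replace("\\\"", "\"");
        try {
          JSONObject json = new JSONObject(raw);
          return "true".equalsIgnoreCase(json.optString("BASIC_STATS"));
        } catch (JSONException e) {
          return false; // legacy or unparseable value: treat as stale
        }
      }

      public static void main(String[] args) {
        System.out.println(basicStatsAccurate("{\"BASIC_STATS\":\"true\"}")); // true
        System.out.println(basicStatsAccurate("TRUE"));                      // false (legacy form)
      }
    }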
partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -654,17 +699,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -697,17 +747,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -740,17 +795,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -826,17 +886,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -869,17 +934,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip 
columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -912,17 +982,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -955,17 +1030,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,zip columns.comments columns.types string:int:int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, i32 locid, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -1113,17 +1193,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1157,17 +1242,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1201,17 +1291,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1245,17 +1340,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1289,17 +1389,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1333,17 +1438,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1377,17 +1487,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ 
-1421,17 +1536,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1465,17 +1585,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1509,17 +1634,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1553,17 +1683,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1640,17 +1775,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 
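One invariant shows up throughout these golden files: a freshly created table or partition lists all four basic stats (numFiles, numRows, rawDataSize, totalSize) as exact zeros and still carries BASIC_STATS true, presumably because zero is the exact row count of an empty object, so the stats are accurate rather than missing. A test-style sketch of that invariant over a parameter map shaped like the ones above (hypothetical names, not Hive API):

    import java.util.HashMap;
    import java.util.Map;

    public class CreateTimeStatsCheck {
      static final String[] BASIC = {"numFiles", "numRows", "rawDataSize", "totalSize"};

      // True when every basic stat is present and exactly "0" and the
      // accuracy flag has the JSON form seen in these hunks. Comparing
      // the JSON as a string is brittle but keeps the sketch short.
      static boolean looksFreshlyCreated(Map<String, String> params) {
        for (String stat : BASIC) {
          if (!"0".equals(params.get(stat))) {
            return false;
          }
        }
        return "{\"BASIC_STATS\":\"true\"}".equals(params.get("COLUMN_STATS_ACCURATE"));
      }

      public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
        for (String stat : BASIC) {
          params.put(stat, "0");
        }
        System.out.println(looksFreshlyCreated(params)); // true
      }
    }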
#### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1684,17 +1824,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1728,17 +1873,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1772,17 +1922,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1816,17 +1971,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1860,17 +2020,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string 
state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1904,17 +2069,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1948,17 +2118,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1992,17 +2167,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -2036,17 +2216,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -2080,17 +2265,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid columns.comments columns.types string:int #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 
partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d diff --git a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out index 3185f70..d834e81 100644 --- a/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out +++ b/ql/src/test/results/clientpositive/extrapolate_part_stats_partial_ndv.q.out @@ -237,17 +237,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt,zip columns.comments columns.types string:double:decimal(10,0):int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -280,17 +285,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt,zip columns.comments columns.types string:double:decimal(10,0):int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -323,17 +333,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt,zip columns.comments columns.types string:double:decimal(10,0):int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -366,17 +381,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt,zip columns.comments columns.types string:double:decimal(10,0):int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 
serialization.ddl struct loc_orc_1d { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -550,17 +570,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt,zip columns.comments columns.types string:double:decimal(10,0):int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -593,17 +618,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt,zip columns.comments columns.types string:double:decimal(10,0):int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -636,17 +666,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt,zip columns.comments columns.types string:double:decimal(10,0):int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -679,17 +714,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt,zip columns.comments columns.types string:double:decimal(10,0):int #### A masked pattern was here #### name default.loc_orc_1d + numFiles 0 + numRows 0 partition_columns year partition_columns.types string + rawDataSize 0 serialization.ddl struct loc_orc_1d { string state, double locid, decimal(10,0) cnt, i32 zip} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_1d @@ -917,17 +957,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: 
org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -961,17 +1006,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1005,17 +1055,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1049,17 +1104,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1093,17 +1153,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1137,17 +1202,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1181,17 +1251,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1225,17 +1300,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1269,17 +1349,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments columns.types string:int:decimal(10,0) #### A masked pattern was here #### name default.loc_orc_2d + numFiles 0 + numRows 0 partition_columns zip/year partition_columns.types int:string + rawDataSize 0 serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde name: default.loc_orc_2d @@ -1313,17 +1398,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns state,locid,cnt columns.comments 
             columns.types string:int:decimal(10,0)
 #### A masked pattern was here ####
             name default.loc_orc_2d
+            numFiles 0
+            numRows 0
             partition_columns zip/year
             partition_columns.types int:string
+            rawDataSize 0
             serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           name: default.loc_orc_2d
@@ -1357,17 +1447,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
           output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns state,locid,cnt
             columns.comments 
             columns.types string:int:decimal(10,0)
 #### A masked pattern was here ####
             name default.loc_orc_2d
+            numFiles 0
+            numRows 0
             partition_columns zip/year
             partition_columns.types int:string
+            rawDataSize 0
             serialization.ddl struct loc_orc_2d { string state, i32 locid, decimal(10,0) cnt}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
           name: default.loc_orc_2d
diff --git a/ql/src/test/results/clientpositive/filter_join_breaktask.q.out b/ql/src/test/results/clientpositive/filter_join_breaktask.q.out
index 018fd5d..1772981 100644
--- a/ql/src/test/results/clientpositive/filter_join_breaktask.q.out
+++ b/ql/src/test/results/clientpositive/filter_join_breaktask.q.out
@@ -217,17 +222,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments 
             columns.types int:string
 #### A masked pattern was here ####
             name default.filter_join_breaktask
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct filter_join_breaktask { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.filter_join_breaktask
@@ -349,17 +354,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments 
             columns.types int:string
 #### A masked pattern was here ####
             name default.filter_join_breaktask
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct filter_join_breaktask { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.filter_join_breaktask
diff --git a/ql/src/test/results/clientpositive/fouter_join_ppr.q.out b/ql/src/test/results/clientpositive/fouter_join_ppr.q.out
index e5ae828..79fa19e 100644
--- a/ql/src/test/results/clientpositive/fouter_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/fouter_join_ppr.q.out
@@ -224,17 +229,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format:
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -270,17 +275,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -316,17 +326,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -362,17 +377,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -695,17 +715,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -741,17 +766,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -787,17 +817,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -833,17 +868,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1178,17 +1218,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1224,17 +1269,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types 
string:string
 #### A masked pattern was here ####
             name default.srcpart
+            numFiles 0
+            numRows 0
             partition_columns ds/hr
             partition_columns.types string:string
+            rawDataSize 0
             serialization.ddl struct srcpart { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcpart
@@ -1554,17 +1604,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments 'default','default'
             columns.types string:string
 #### A masked pattern was here ####
             name default.srcpart
+            numFiles 0
+            numRows 0
             partition_columns ds/hr
             partition_columns.types string:string
+            rawDataSize 0
             serialization.ddl struct srcpart { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcpart
@@ -1600,17 +1655,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments 'default','default'
             columns.types string:string
 #### A masked pattern was here ####
             name default.srcpart
+            numFiles 0
+            numRows 0
             partition_columns ds/hr
             partition_columns.types string:string
+            rawDataSize 0
             serialization.ddl struct srcpart { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/groupby_map_ppr.q.out b/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
index 556995c..a005d98 100644
--- a/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/groupby_map_ppr.q.out
@@ -161,17 +161,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments 'default','default'
             columns.types string:string
 #### A masked pattern was here ####
             name default.srcpart
+            numFiles 0
+            numRows 0
             partition_columns ds/hr
             partition_columns.types string:string
+            rawDataSize 0
             serialization.ddl struct srcpart { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcpart
@@ -207,17 +212,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments 'default','default'
             columns.types string:string
 #### A masked pattern was here ####
             name default.srcpart
+            numFiles 0
+            numRows 0
             partition_columns ds/hr
             partition_columns.types string:string
+            rawDataSize 0
             serialization.ddl struct srcpart { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcpart
@@ -248,15 +258,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,c1,c2
             columns.comments 
             columns.types string:int:string
 #### A masked pattern was here ####
             name default.dest1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct dest1 { string key, i32 c1, string c2}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.dest1
@@ -273,15 +288,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,c1,c2
             columns.comments 
             columns.types string:int:string
 #### A masked pattern was here ####
             name default.dest1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct dest1 { string key, i32 c1, string c2}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.dest1
diff --git a/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
index 393e6fa..50cd8d0 100644
--- a/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
+++ b/ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out
@@ -178,17 +178,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments 'default','default'
             columns.types string:string
 #### A masked pattern was here ####
             name default.srcpart
+            numFiles 0
+            numRows 0
             partition_columns ds/hr
             partition_columns.types string:string
+            rawDataSize 0
             serialization.ddl struct srcpart { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcpart
@@ -224,17 +229,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments 'default','default'
             columns.types string:string
 #### A masked pattern was here ####
             name default.srcpart
+            numFiles 0
+            numRows 0
             partition_columns ds/hr
             partition_columns.types string:string
+            rawDataSize 0
             serialization.ddl struct srcpart { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcpart
@@ -265,15 +275,20 @@ STAGE PLANS:
           input format:
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2,c3,c4 columns.comments columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -290,15 +305,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2,c3,c4 columns.comments columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/groupby_ppr.q.out b/ql/src/test/results/clientpositive/groupby_ppr.q.out index 2e160b3..80d9cc1 100644 --- a/ql/src/test/results/clientpositive/groupby_ppr.q.out +++ b/ql/src/test/results/clientpositive/groupby_ppr.q.out @@ -154,17 +154,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -200,17 +205,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -241,15 +251,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2 columns.comments columns.types string:int:string #### A masked pattern was here #### name default.dest1 + numFiles 
0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -266,15 +281,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2 columns.comments columns.types string:int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out index 93fbc0e..0928fbf 100644 --- a/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out +++ b/ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out @@ -171,17 +171,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -217,17 +222,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -258,15 +268,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2,c3,c4 columns.comments columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
           name: default.dest1
@@ -283,15 +298,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,c1,c2,c3,c4
             columns.comments 
             columns.types string:int:string:int:int
 #### A masked pattern was here ####
             name default.dest1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.dest1
diff --git a/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out b/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
index efb13a7..610cc5b 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_1_23.q.out
@@ -122,15 +122,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,cnt
             columns.comments 
             columns.types int:int
 #### A masked pattern was here ####
             name default.outputtbl1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.outputtbl1
@@ -209,15 +214,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,cnt
             columns.comments 
             columns.types int:int
 #### A masked pattern was here ####
             name default.outputtbl1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.outputtbl1
@@ -240,15 +250,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,cnt
             columns.comments 
             columns.types int:int
 #### A masked pattern was here ####
             name default.outputtbl1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.outputtbl1
@@ -264,30 +279,40 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,cnt
             columns.comments 
             columns.types int:int
 #### A masked pattern was here ####
             name default.outputtbl1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
             serialization.format 1
             serialization.lib
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -309,15 +334,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -333,30 +363,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -551,15 +591,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2 @@ -576,15 +621,20 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2 @@ -1525,15 +1575,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1612,15 +1667,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1643,15 +1703,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1667,30 +1732,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1712,15 +1787,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1736,30 +1816,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1958,15 +2048,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,cnt columns.comments columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4 @@ -1983,15 +2078,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns 
key1,key2,key3,cnt columns.comments columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4 @@ -5079,15 +5179,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5166,15 +5271,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5197,15 +5307,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5221,30 +5336,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5266,15 +5391,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5290,30 +5420,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 diff --git a/ql/src/test/results/clientpositive/groupby_sort_6.q.out b/ql/src/test/results/clientpositive/groupby_sort_6.q.out index 844a2a4..08d1783 100644 --- a/ql/src/test/results/clientpositive/groupby_sort_6.q.out +++ b/ql/src/test/results/clientpositive/groupby_sort_6.q.out @@ -117,15 +117,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.outputtbl1
@@ -142,15 +147,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,cnt
             columns.comments 
             columns.types int:int
 #### A masked pattern was here ####
             name default.outputtbl1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.outputtbl1
@@ -451,8 +461,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.t1
             numFiles 1
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct t1 { string key, string val}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -463,17 +475,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,val
             columns.comments 
             columns.types string:string
 #### A masked pattern was here ####
             name default.t1
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct t1 { string key, string val}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.t1
diff --git a/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out b/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
index 860a880..724f20b 100644
--- a/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
+++ b/ql/src/test/results/clientpositive/groupby_sort_skew_1_23.q.out
@@ -122,15 +122,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,cnt
             columns.comments 
             columns.types int:int
 #### A masked pattern was here ####
             name default.outputtbl1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.outputtbl1
@@ -209,15 +214,20 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,cnt
             columns.comments 
             columns.types int:int
 #### A masked pattern was here ####
             name default.outputtbl1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct outputtbl1 { i32 key, i32 cnt}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name:
default.outputtbl1 @@ -240,15 +250,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -264,30 +279,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -309,15 +334,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -333,30 +363,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments 
columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -617,15 +657,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2 @@ -642,15 +687,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2 @@ -1591,15 +1641,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1678,15 +1733,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1709,15 +1769,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name 
default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1733,30 +1798,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1778,15 +1853,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1802,30 +1882,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -2090,15 +2180,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,cnt columns.comments columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4 @@ -2115,15 +2210,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,cnt columns.comments columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4 @@ -5541,15 +5641,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5628,15 +5733,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5659,15 +5769,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 
serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5683,30 +5798,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5728,15 +5853,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -5752,30 +5882,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + 
numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 diff --git a/ql/src/test/results/clientpositive/input23.q.out b/ql/src/test/results/clientpositive/input23.q.out index 33f2eb6..614c097 100644 --- a/ql/src/test/results/clientpositive/input23.q.out +++ b/ql/src/test/results/clientpositive/input23.q.out @@ -133,17 +133,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/input42.q.out b/ql/src/test/results/clientpositive/input42.q.out index 4219c17..59494ce 100644 --- a/ql/src/test/results/clientpositive/input42.q.out +++ b/ql/src/test/results/clientpositive/input42.q.out @@ -69,17 +69,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -113,17 +118,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1223,17 +1233,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + 
numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1267,17 +1282,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1760,17 +1780,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1804,17 +1829,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/input_part1.q.out b/ql/src/test/results/clientpositive/input_part1.q.out index 501f7a9..c640bbb 100644 --- a/ql/src/test/results/clientpositive/input_part1.q.out +++ b/ql/src/test/results/clientpositive/input_part1.q.out @@ -107,15 +107,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked 
pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -156,17 +161,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -192,15 +202,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -223,15 +238,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -247,30 +267,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -292,15 +322,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -316,30 +351,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/input_part2.q.out b/ql/src/test/results/clientpositive/input_part2.q.out index 9eba510..9b523f4 100644 --- a/ql/src/test/results/clientpositive/input_part2.q.out +++ b/ql/src/test/results/clientpositive/input_part2.q.out @@ -175,15 +175,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -209,15 +214,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest2 + 
numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 @@ -258,17 +268,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -304,17 +319,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -341,15 +361,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -372,15 +397,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -396,30 +426,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns 
key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -441,15 +481,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -465,30 +510,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -520,15 +575,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was 
here #### name default.dest2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 @@ -551,15 +611,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 @@ -575,30 +640,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 @@ -620,15 +695,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 @@ -644,30 +724,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest2 + numFiles 0 + 
numRows 0 + rawDataSize 0 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest2 { i32 key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest2 diff --git a/ql/src/test/results/clientpositive/input_part7.q.out b/ql/src/test/results/clientpositive/input_part7.q.out index 53125d1..f94777b 100644 --- a/ql/src/test/results/clientpositive/input_part7.q.out +++ b/ql/src/test/results/clientpositive/input_part7.q.out @@ -205,17 +205,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -251,17 +256,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/input_part9.q.out b/ql/src/test/results/clientpositive/input_part9.q.out index 0a3c87b..2153143 100644 --- a/ql/src/test/results/clientpositive/input_part9.q.out +++ b/ql/src/test/results/clientpositive/input_part9.q.out @@ -74,17 +74,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct 
srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -118,17 +123,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/insert_values_orig_table_use_metadata.q.out b/ql/src/test/results/clientpositive/insert_values_orig_table_use_metadata.q.out new file mode 100644 index 0000000..7356239 --- /dev/null +++ b/ql/src/test/results/clientpositive/insert_values_orig_table_use_metadata.q.out @@ -0,0 +1,994 @@ +PREHOOK: query: create table acid_ivot( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: create table acid_ivot( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@acid_ivot +PREHOOK: query: desc formatted acid_ivot +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@acid_ivot +POSTHOOK: query: desc formatted acid_ivot +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@acid_ivot +# col_name data_type comment + +ctinyint tinyint +csmallint smallint +cint int +cbigint bigint +cfloat float +cdouble double +cstring1 string +cstring2 string +ctimestamp1 timestamp +ctimestamp2 timestamp +cboolean1 boolean +cboolean2 boolean + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 + transactional true +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 1 +Bucket Columns: [cint] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot +PREHOOK: type: LOAD +#### A masked 
pattern was here #### +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@acid_ivot +PREHOOK: query: desc formatted acid_ivot +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@acid_ivot +POSTHOOK: query: desc formatted acid_ivot +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@acid_ivot +# col_name data_type comment + +ctinyint tinyint +csmallint smallint +cint int +cbigint bigint +cfloat float +cdouble double +cstring1 string +cstring2 string +ctimestamp1 timestamp +ctimestamp2 timestamp +cboolean1 boolean +cboolean2 boolean + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 377237 + transactional true +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 1 +Bucket Columns: [cint] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from acid_ivot +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from acid_ivot +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: acid_ivot + Statistics: Num rows: 1 Data size: 377237 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 377237 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from acid_ivot +PREHOOK: type: QUERY +PREHOOK: Input: default@acid_ivot +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from acid_ivot +POSTHOOK: type: QUERY +POSTHOOK: Input: default@acid_ivot +#### A masked pattern was here #### +12288 +PREHOOK: query: insert into table acid_ivot values + (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true), + (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__1 +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: insert into table 
acid_ivot values + (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true), + (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__1 +POSTHOOK: Output: default@acid_ivot +POSTHOOK: Lineage: acid_ivot.cbigint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col4, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cboolean1 EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col11, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cboolean2 EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col12, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cdouble EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col6, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cfloat EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col5, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.csmallint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cstring1 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col7, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cstring2 SIMPLE [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col8, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctimestamp1 EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col9, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctimestamp2 EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col10, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctinyint EXPRESSION [(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +PREHOOK: query: desc formatted acid_ivot +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@acid_ivot +POSTHOOK: query: desc formatted acid_ivot +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@acid_ivot +# col_name data_type comment + +ctinyint tinyint +csmallint smallint +cint int +cbigint bigint +cfloat float +cdouble double +cstring1 string +cstring2 string +ctimestamp1 timestamp +ctimestamp2 timestamp +cboolean1 boolean +cboolean2 boolean + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 2 + numRows 0 + rawDataSize 0 + totalSize 378741 + transactional true +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 1 +Bucket Columns: [cint] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from acid_ivot +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from acid_ivot +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + 
+STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: acid_ivot + Statistics: Num rows: 1 Data size: 378741 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 1 Data size: 378741 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from acid_ivot +PREHOOK: type: QUERY +PREHOOK: Input: default@acid_ivot +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from acid_ivot +POSTHOOK: type: QUERY +POSTHOOK: Input: default@acid_ivot +#### A masked pattern was here #### +12290 +PREHOOK: query: drop table acid_ivot +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@acid_ivot +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: drop table acid_ivot +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@acid_ivot +POSTHOOK: Output: default@acid_ivot +PREHOOK: query: create table acid_ivot( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true') +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: create table acid_ivot( + ctinyint TINYINT, + csmallint SMALLINT, + cint INT, + cbigint BIGINT, + cfloat FLOAT, + cdouble DOUBLE, + cstring1 STRING, + cstring2 STRING, + ctimestamp1 TIMESTAMP, + ctimestamp2 TIMESTAMP, + cboolean1 BOOLEAN, + cboolean2 BOOLEAN) clustered by (cint) into 1 buckets stored as orc TBLPROPERTIES ('transactional'='true') +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@acid_ivot +PREHOOK: query: insert into table acid_ivot values + (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true), + (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__2 +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: insert into table acid_ivot values + (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true), + (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__2 +POSTHOOK: Output: default@acid_ivot +POSTHOOK: Lineage: 
acid_ivot.cbigint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col4, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cboolean1 EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col11, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cboolean2 EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col12, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cdouble EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col6, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cfloat EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col5, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.csmallint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cstring1 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col7, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cstring2 SIMPLE [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col8, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctimestamp1 EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col9, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctimestamp2 EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col10, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctinyint EXPRESSION [(values__tmp__table__2)values__tmp__table__2.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +PREHOOK: query: desc formatted acid_ivot +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@acid_ivot +POSTHOOK: query: desc formatted acid_ivot +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@acid_ivot +# col_name data_type comment + +ctinyint tinyint +csmallint smallint +cint int +cbigint bigint +cfloat float +cdouble double +cstring1 string +cstring2 string +ctimestamp1 timestamp +ctimestamp2 timestamp +cboolean1 boolean +cboolean2 boolean + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 1 + numRows 2 + rawDataSize 0 + totalSize 1508 + transactional true +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 1 +Bucket Columns: [cint] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from acid_ivot +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from acid_ivot +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from acid_ivot +PREHOOK: type: QUERY +PREHOOK: Input: default@acid_ivot +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from acid_ivot +POSTHOOK: type: QUERY +POSTHOOK: Input: default@acid_ivot +#### A masked pattern 
was here #### +2 +PREHOOK: query: insert into table acid_ivot values + (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true), + (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true) +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__3 +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: insert into table acid_ivot values + (1, 2, 3, 4, 3.14, 2.34, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true), + (111, 222, 3333, 444, 13.14, 10239302.34239320, 'fred', 'bob', '2014-09-01 10:34:23.111', '1944-06-06 06:00:00', true, true) +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__3 +POSTHOOK: Output: default@acid_ivot +POSTHOOK: Lineage: acid_ivot.cbigint EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col4, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cboolean1 EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col11, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cboolean2 EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col12, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cdouble EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col6, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cfloat EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col5, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cint EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col3, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.csmallint EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cstring1 SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col7, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.cstring2 SIMPLE [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col8, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctimestamp1 EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col9, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctimestamp2 EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col10, type:string, comment:), ] +POSTHOOK: Lineage: acid_ivot.ctinyint EXPRESSION [(values__tmp__table__3)values__tmp__table__3.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +PREHOOK: query: desc formatted acid_ivot +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@acid_ivot +POSTHOOK: query: desc formatted acid_ivot +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@acid_ivot +# col_name data_type comment + +ctinyint tinyint +csmallint smallint +cint int +cbigint bigint +cfloat float +cdouble double +cstring1 string +cstring2 string +ctimestamp1 timestamp +ctimestamp2 timestamp +cboolean1 boolean +cboolean2 boolean + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 2 + numRows 4 + rawDataSize 0 + totalSize 3016 + transactional true +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde 
+InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 1 +Bucket Columns: [cint] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from acid_ivot +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from acid_ivot +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from acid_ivot +PREHOOK: type: QUERY +PREHOOK: Input: default@acid_ivot +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from acid_ivot +POSTHOOK: type: QUERY +POSTHOOK: Input: default@acid_ivot +#### A masked pattern was here #### +4 +PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/alltypesorc" into table acid_ivot +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@acid_ivot +PREHOOK: query: desc formatted acid_ivot +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@acid_ivot +POSTHOOK: query: desc formatted acid_ivot +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@acid_ivot +# col_name data_type comment + +ctinyint tinyint +csmallint smallint +cint int +cbigint bigint +cfloat float +cdouble double +cstring1 string +cstring2 string +ctimestamp1 timestamp +ctimestamp2 timestamp +cboolean1 boolean +cboolean2 boolean + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + numFiles 3 + numRows 4 + rawDataSize 0 + totalSize 380253 + transactional true +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.ql.io.orc.OrcSerde +InputFormat: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat +Compressed: No +Num Buckets: 1 +Bucket Columns: [cint] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from acid_ivot +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from acid_ivot +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: acid_ivot + Statistics: Num rows: 4 Data size: 380253 Basic stats: COMPLETE Column stats: COMPLETE + Select Operator + Statistics: Num rows: 4 Data size: 380253 Basic stats: COMPLETE Column stats: COMPLETE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE + table: + input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat + output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: drop table acid_ivot +PREHOOK: type: DROPTABLE +PREHOOK: Input: default@acid_ivot +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: drop table acid_ivot +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: default@acid_ivot +POSTHOOK: Output: default@acid_ivot +PREHOOK: query: create table acid_ivot like src +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: create table acid_ivot like src +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@acid_ivot +PREHOOK: query: desc formatted acid_ivot +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@acid_ivot +POSTHOOK: query: desc formatted acid_ivot +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@acid_ivot +# col_name data_type comment + +key string default +value string default + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: insert overwrite table acid_ivot select * from src +PREHOOK: type: QUERY +PREHOOK: Input: default@src +PREHOOK: Output: default@acid_ivot +POSTHOOK: query: insert overwrite table acid_ivot select * from src +POSTHOOK: type: QUERY +POSTHOOK: Input: default@src +POSTHOOK: Output: default@acid_ivot +POSTHOOK: Lineage: acid_ivot.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: acid_ivot.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +PREHOOK: query: desc formatted acid_ivot +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@acid_ivot +POSTHOOK: query: desc formatted acid_ivot +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@acid_ivot +# col_name data_type comment + +key string default +value string default + +# Detailed Table Information +Database: default +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from acid_ivot +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from acid_ivot +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from acid_ivot +PREHOOK: type: QUERY +PREHOOK: Input: 
default@acid_ivot +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from acid_ivot +POSTHOOK: type: QUERY +POSTHOOK: Input: default@acid_ivot +#### A masked pattern was here #### +500 +PREHOOK: query: CREATE TABLE sp (key STRING COMMENT 'default', value STRING COMMENT 'default') +PARTITIONED BY (ds STRING, hr STRING) +STORED AS TEXTFILE +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@sp +POSTHOOK: query: CREATE TABLE sp (key STRING COMMENT 'default', value STRING COMMENT 'default') +PARTITIONED BY (ds STRING, hr STRING) +STORED AS TEXTFILE +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@sp +PREHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/kv1.txt" +OVERWRITE INTO TABLE sp PARTITION (ds="2008-04-08", hr="11") +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: default@sp +POSTHOOK: query: LOAD DATA LOCAL INPATH "../../data/files/kv1.txt" +OVERWRITE INTO TABLE sp PARTITION (ds="2008-04-08", hr="11") +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: default@sp +POSTHOOK: Output: default@sp@ds=2008-04-08/hr=11 +PREHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11") +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@sp +POSTHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11") +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@sp +# col_name data_type comment + +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: sp +#### A masked pattern was here #### +Partition Parameters: + numFiles 1 + numRows 0 + rawDataSize 0 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from sp where ds="2008-04-08" and hr="11" +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from sp where ds="2008-04-08" and hr="11" +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-1 is a root stage + Stage-0 depends on stages: Stage-1 + +STAGE PLANS: + Stage: Stage-1 + Map Reduce + Map Operator Tree: + TableScan + alias: sp + Statistics: Num rows: 1 Data size: 5812 Basic stats: PARTIAL Column stats: NONE + Select Operator + Statistics: Num rows: 1 Data size: 5812 Basic stats: PARTIAL Column stats: NONE + Group By Operator + aggregations: count() + mode: hash + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: bigint) + Reduce Operator Tree: + Group By Operator + aggregations: count(VALUE._col0) + mode: mergepartial + outputColumnNames: _col0 + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + File Output Operator + compressed: false + Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: NONE + table: + input format: org.apache.hadoop.mapred.SequenceFileInputFormat + output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat + serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + + Stage: Stage-0 + Fetch Operator + limit: -1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from sp where ds="2008-04-08" and hr="11" +PREHOOK: type: QUERY +PREHOOK: Input: default@sp +PREHOOK: Input: default@sp@ds=2008-04-08/hr=11 +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from sp where ds="2008-04-08" and hr="11" +POSTHOOK: type: QUERY +POSTHOOK: Input: default@sp +POSTHOOK: Input: default@sp@ds=2008-04-08/hr=11 +#### A masked pattern was here #### +500 +PREHOOK: query: insert into table sp PARTITION (ds="2008-04-08", hr="11") values + ('1', '2'), ('3', '4') +PREHOOK: type: QUERY +PREHOOK: Input: default@values__tmp__table__4 +PREHOOK: Output: default@sp@ds=2008-04-08/hr=11 +POSTHOOK: query: insert into table sp PARTITION (ds="2008-04-08", hr="11") values + ('1', '2'), ('3', '4') +POSTHOOK: type: QUERY +POSTHOOK: Input: default@values__tmp__table__4 +POSTHOOK: Output: default@sp@ds=2008-04-08/hr=11 +POSTHOOK: Lineage: sp PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col1, type:string, comment:), ] +POSTHOOK: Lineage: sp PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(values__tmp__table__4)values__tmp__table__4.FieldSchema(name:tmp_values_col2, type:string, comment:), ] +PREHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11") +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@sp +POSTHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11") +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@sp +# col_name data_type comment + +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: sp +#### A masked pattern was here #### +Partition Parameters: + numFiles 2 + numRows 0 + rawDataSize 0 + totalSize 5820 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: analyze table sp PARTITION (ds="2008-04-08", hr="11") compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: default@sp +PREHOOK: Input: default@sp@ds=2008-04-08/hr=11 +PREHOOK: Output: default@sp +PREHOOK: Output: default@sp@ds=2008-04-08/hr=11 +POSTHOOK: query: analyze table sp PARTITION (ds="2008-04-08", hr="11") compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: default@sp +POSTHOOK: Input: default@sp@ds=2008-04-08/hr=11 +POSTHOOK: Output: default@sp +POSTHOOK: Output: default@sp@ds=2008-04-08/hr=11 +PREHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11") +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@sp +POSTHOOK: query: desc formatted sp PARTITION (ds="2008-04-08", hr="11") +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@sp +# col_name data_type comment + +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: default +Table: sp +#### A masked pattern was here #### +Partition Parameters: + 
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 2 + numRows 502 + rawDataSize 5318 + totalSize 5820 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: explain select count(*) from sp where ds="2008-04-08" and hr="11" +PREHOOK: type: QUERY +POSTHOOK: query: explain select count(*) from sp where ds="2008-04-08" and hr="11" +POSTHOOK: type: QUERY +STAGE DEPENDENCIES: + Stage-0 is a root stage + +STAGE PLANS: + Stage: Stage-0 + Fetch Operator + limit: 1 + Processor Tree: + ListSink + +PREHOOK: query: select count(*) from sp where ds="2008-04-08" and hr="11" +PREHOOK: type: QUERY +PREHOOK: Input: default@sp +#### A masked pattern was here #### +POSTHOOK: query: select count(*) from sp where ds="2008-04-08" and hr="11" +POSTHOOK: type: QUERY +POSTHOOK: Input: default@sp +#### A masked pattern was here #### +502 diff --git a/ql/src/test/results/clientpositive/join17.q.out b/ql/src/test/results/clientpositive/join17.q.out index 13260ef..078a8b8 100644 --- a/ql/src/test/results/clientpositive/join17.q.out +++ b/ql/src/test/results/clientpositive/join17.q.out @@ -181,15 +181,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,value1,key2,value2 columns.comments columns.types int:string:int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -206,15 +211,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,value1,key2,value2 columns.comments columns.types int:string:int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key1, string value1, i32 key2, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/join26.q.out b/ql/src/test/results/clientpositive/join26.q.out index 5533ce4..ece665c 100644 --- a/ql/src/test/results/clientpositive/join26.q.out +++ b/ql/src/test/results/clientpositive/join26.q.out @@ -191,15 +191,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct 
dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -242,17 +247,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -278,15 +288,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -309,15 +324,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -333,30 +353,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { 
string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -378,15 +408,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -402,30 +437,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 diff --git a/ql/src/test/results/clientpositive/join32.q.out b/ql/src/test/results/clientpositive/join32.q.out index 5cb124b..ec250cb 100644 --- a/ql/src/test/results/clientpositive/join32.q.out +++ b/ql/src/test/results/clientpositive/join32.q.out @@ -201,15 +201,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -340,17 +345,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value 
columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -367,15 +377,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 diff --git a/ql/src/test/results/clientpositive/join32_lessSize.q.out b/ql/src/test/results/clientpositive/join32_lessSize.q.out index 544e814..1534758 100644 --- a/ql/src/test/results/clientpositive/join32_lessSize.q.out +++ b/ql/src/test/results/clientpositive/join32_lessSize.q.out @@ -263,17 +263,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -336,15 +341,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -432,15 +442,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + 
totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -1520,17 +1535,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1584,15 +1604,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j2 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j2 @@ -1655,17 +1680,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1682,15 +1712,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j2 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j2 @@ -2128,17 +2163,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + 
rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2268,17 +2308,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/join33.q.out b/ql/src/test/results/clientpositive/join33.q.out index 5cb124b..ec250cb 100644 --- a/ql/src/test/results/clientpositive/join33.q.out +++ b/ql/src/test/results/clientpositive/join33.q.out @@ -201,15 +201,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -340,17 +345,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -367,15 +377,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 diff --git a/ql/src/test/results/clientpositive/join34.q.out 
b/ql/src/test/results/clientpositive/join34.q.out index e2c2b1a..bdf56db 100644 --- a/ql/src/test/results/clientpositive/join34.q.out +++ b/ql/src/test/results/clientpositive/join34.q.out @@ -212,15 +212,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -265,15 +270,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -385,15 +395,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 diff --git a/ql/src/test/results/clientpositive/join35.q.out b/ql/src/test/results/clientpositive/join35.q.out index 96f7cc8..9787e1a 100644 --- a/ql/src/test/results/clientpositive/join35.q.out +++ b/ql/src/test/results/clientpositive/join35.q.out @@ -317,15 +317,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:int #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -360,15 +365,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments 
columns.types string:string:int #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -476,15 +486,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:int #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, i32 val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 diff --git a/ql/src/test/results/clientpositive/join41.q.out b/ql/src/test/results/clientpositive/join41.q.out index 2ad59f5..24383b1 100644 --- a/ql/src/test/results/clientpositive/join41.q.out +++ b/ql/src/test/results/clientpositive/join41.q.out @@ -26,32 +26,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src1 - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) TableScan alias: src1 - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) > 10.0) (type: boolean) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -61,10 +61,10 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE File Output Operator 
compressed: false - Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -105,32 +105,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src1 - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) TableScan alias: src1 - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) > 10.0) (type: boolean) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -140,10 +140,10 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 23 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 26 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/join42.q.out b/ql/src/test/results/clientpositive/join42.q.out index 462e49e..b199021 100644 --- a/ql/src/test/results/clientpositive/join42.q.out +++ b/ql/src/test/results/clientpositive/join42.q.out @@ -131,40 +131,40 @@ STAGE PLANS: Map Operator Tree: TableScan alias: l - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (id = 4436) (type: boolean) - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: 
Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE TableScan alias: la - Statistics: Num rows: 1 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((loan_id = 4436) and aid is not null and pi_id is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: aid (type: int), pi_id (type: int) outputColumnNames: _col1, _col2 - Statistics: Num rows: 1 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 15 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int) TableScan alias: fr - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (loan_id = 4436) (type: boolean) - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -175,7 +175,7 @@ STAGE PLANS: 1 2 outputColumnNames: _col2, _col3 - Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 11 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -191,38 +191,38 @@ STAGE PLANS: key expressions: _col2 (type: int) sort order: + Map-reduce partition columns: _col2 (type: int) - Statistics: Num rows: 2 Data size: 8 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 11 Basic stats: COMPLETE Column stats: NONE value expressions: _col3 (type: int) TableScan alias: a - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: id is not null (type: boolean) - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: id (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE 
+ Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE TableScan alias: acct - Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 34 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (aid is not null and brn is not null) (type: boolean) - Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 34 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: aid (type: int), acc_n (type: int), brn (type: int) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 34 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 31 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 34 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: int), _col2 (type: int) Reduce Operator Tree: Join Operator @@ -234,7 +234,7 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col3, _col7, _col8 - Statistics: Num rows: 6 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 24 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -250,23 +250,23 @@ STAGE PLANS: key expressions: _col3 (type: int) sort order: + Map-reduce partition columns: _col3 (type: int) - Statistics: Num rows: 6 Data size: 68 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 24 Basic stats: COMPLETE Column stats: NONE value expressions: _col7 (type: int), _col8 (type: int) TableScan alias: pi - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: id is not null (type: boolean) - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: id (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -275,14 +275,14 @@ STAGE PLANS: 0 _col3 (type: int) 1 _col0 (type: int) outputColumnNames: _col7, _col8 - Statistics: Num rows: 6 Data size: 74 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 26 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col7 (type: int), _col8 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 74 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 26 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6 Data size: 74 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 26 Basic stats: COMPLETE Column stats: NONE table: input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/join9.q.out b/ql/src/test/results/clientpositive/join9.q.out index e904b31..eedde13 100644 --- a/ql/src/test/results/clientpositive/join9.q.out +++ b/ql/src/test/results/clientpositive/join9.q.out @@ -200,17 +200,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -243,15 +248,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -268,15 +278,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/join_filters_overlap.q.out b/ql/src/test/results/clientpositive/join_filters_overlap.q.out index dede6b7..37259d9 100644 --- a/ql/src/test/results/clientpositive/join_filters_overlap.q.out +++ b/ql/src/test/results/clientpositive/join_filters_overlap.q.out @@ -104,60 +104,60 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 
Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 50) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 50 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 60) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 60 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 2 value expressions: _col1 (type: int) auto parallelism: false @@ -170,7 +170,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -178,8 +177,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -190,7 +187,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -198,8 +194,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -227,13 +221,13 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data 
size: 46 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -367,60 +361,60 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 50) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 50 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 60) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 60 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 2 value expressions: _col1 (type: int) auto parallelism: false @@ -433,7 +427,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -441,8 +434,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -453,7 +444,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -461,8 +451,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -490,13 +478,13 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -644,60 +632,60 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 50) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 50 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: 
_col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 60) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 60 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 2 value expressions: _col1 (type: int) auto parallelism: false @@ -710,7 +698,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -718,8 +705,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -730,7 +715,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -738,8 +722,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -767,13 +749,13 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -935,77 +917,77 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 60) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 60 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 2 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 40) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 40 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 3 value expressions: _col1 (type: int) auto parallelism: false @@ -1018,7 +1000,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -1026,8 +1007,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl 
struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1038,7 +1017,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -1046,8 +1024,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1079,13 +1055,13 @@ STAGE PLANS: 2 _col0 (type: int) 3 _col0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 9 Data size: 59 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 69 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 9 Data size: 59 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 69 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -1238,81 +1214,81 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 50) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 50 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: 
(value = 60) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 60 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 2 value expressions: _col1 (type: int) auto parallelism: false TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 40) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 40 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 3 value expressions: _col1 (type: int) auto parallelism: false @@ -1325,7 +1301,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -1333,8 +1308,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1345,7 +1318,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -1353,8 +1325,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1385,13 +1355,13 @@ STAGE PLANS: 2 _col0 (type: int) 3 _col0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7 - Statistics: Num rows: 9 Data size: 59 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 69 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 9 Data size: 59 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 69 Basic stats: COMPLETE Column stats: NONE #### A 
masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat diff --git a/ql/src/test/results/clientpositive/join_map_ppr.q.out b/ql/src/test/results/clientpositive/join_map_ppr.q.out index 444fd7f..c7d683b 100644 --- a/ql/src/test/results/clientpositive/join_map_ppr.q.out +++ b/ql/src/test/results/clientpositive/join_map_ppr.q.out @@ -193,15 +193,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -244,17 +249,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -280,15 +290,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -311,15 +326,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -335,30 +355,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types 
string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -380,15 +410,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -404,30 +439,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,val2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.dest_j1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest_j1 { string key, string value, string val2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest_j1 @@ -856,17 +901,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns 
ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/lateral_view_outer.q.out b/ql/src/test/results/clientpositive/lateral_view_outer.q.out index 994945a..de00fe7 100644 --- a/ql/src/test/results/clientpositive/lateral_view_outer.q.out +++ b/ql/src/test/results/clientpositive/lateral_view_outer.q.out @@ -210,26 +210,26 @@ STAGE PLANS: Map Operator Tree: TableScan alias: array_valued - Statistics: Num rows: 500 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 6110 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 500 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 6110 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: array) outputColumnNames: key, value - Statistics: Num rows: 500 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 6110 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col1, _col5 - Statistics: Num rows: 1000 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 12220 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: array), _col5 (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 1000 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 12220 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 110 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 12220 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 110 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 12220 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -237,24 +237,24 @@ STAGE PLANS: Select Operator expressions: value (type: array) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 6110 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 500 Data size: 5610 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 6110 Basic stats: COMPLETE Column stats: NONE function name: explode outer lateral view: true Lateral View Join Operator outputColumnNames: _col0, _col1, _col5 - Statistics: Num rows: 1000 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 12220 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: array), _col5 (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 1000 Data size: 11220 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 12220 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 110 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 12220 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 110 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6 Data size: 12220 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out index 0cd07ef..d92b968 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_1.q.out @@ -94,17 +94,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part @@ -145,17 +150,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -191,17 +201,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -222,17 +237,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types 
string:string + rawDataSize 0 serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part @@ -465,17 +485,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_dynamic_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_dynamic_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_dynamic_part diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out index f304083..4dbd484 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_11.q.java1.7.out @@ -97,17 +97,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part @@ -176,17 +181,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part @@ -339,17 +349,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + 
rawDataSize 0 serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out index 7d8070a..ff2d802 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_12.q.java1.7.out @@ -101,17 +101,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns col1,col2,col3,col4,col5 columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col @@ -180,17 +185,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns col1,col2,col3,col4,col5 columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col @@ -352,17 +362,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns col1,col2,col3,col4,col5 columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col @@ -478,17 +493,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns col1,col2,col3,col4,col5 columns.comments 
columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out index 81b1431..e8eaed1 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_13.q.java1.7.out @@ -101,17 +101,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns col1,col2,col3,col4,col5 columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col @@ -180,17 +185,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns col1,col2,col3,col4,col5 columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col @@ -352,17 +362,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns col1,col2,col3,col4,col5 columns.comments columns.types string:string:string:string:string #### A masked pattern was here #### name default.list_bucketing_mul_col + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_mul_col { string col1, string col2, string col3, string col4, string col5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_mul_col diff --git 
a/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out index 790801f..ee86b73 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_14.q.out @@ -73,15 +73,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct list_bucketing { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing @@ -147,15 +152,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct list_bucketing { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out index bfc1e43..59b5c16 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_2.q.java1.7.out @@ -118,17 +118,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part @@ -169,17 +174,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -215,17 +225,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -246,17 +261,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part @@ -438,17 +458,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.list_bucketing_static_part diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out index ea24f86..ff98222 100644 --- a/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_dml_3.q.out @@ -86,17 +86,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.list_bucketing_static_part + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct list_bucketing_static_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.list_bucketing_static_part @@ -137,17 +142,22 @@ 
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -183,17 +193,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -214,17 +229,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.list_bucketing_static_part
@@ -407,17 +427,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.list_bucketing_static_part
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out
index 6b8b89f..110bba5 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_4.q.java1.7.out
@@ -126,17 +126,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -177,17 +182,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -223,17 +233,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -254,17 +269,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -420,16 +440,21 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -470,17 +495,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -516,17 +546,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -556,16 +591,21 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -587,32 +627,42 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -634,32 +684,42 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -847,17 +907,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out
index 1019474..6795ce4 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_5.q.java1.7.out
@@ -98,17 +98,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.list_bucketing_dynamic_part
@@ -149,17 +154,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -195,17 +205,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -226,17 +241,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.list_bucketing_dynamic_part
@@ -480,17 +500,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.list_bucketing_dynamic_part
@@ -524,17 +549,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.list_bucketing_dynamic_part
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
index 668e918..c940daa 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_6.q.java1.7.out
@@ -182,17 +182,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -233,17 +238,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -279,17 +289,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -310,17 +325,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -533,17 +553,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -584,17 +609,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -630,17 +660,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -670,17 +705,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -702,34 +742,44 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -751,34 +801,44 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -1005,17 +1065,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -1049,17 +1114,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out b/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
index 3aa9003..282f35a 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_7.q.out
@@ -128,17 +128,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -179,17 +184,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -225,17 +235,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -256,17 +271,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -479,17 +499,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -530,17 +555,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -576,17 +606,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -616,17 +651,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -648,34 +688,44 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -697,34 +747,44 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns hr
                 partition_columns.types string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -951,17 +1011,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -995,17 +1060,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
index 672e5ac..d737479 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_8.q.java1.7.out
@@ -184,17 +184,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -235,17 +240,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -281,17 +291,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -312,17 +327,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -482,6 +502,8 @@ Table: list_bucketing_dynamic_part
 #### A masked pattern was here ####
 Partition Parameters:
 	numFiles 3
+	numRows 984
+	rawDataSize 9488
 	totalSize 10586
 #### A masked pattern was here ####
@@ -595,17 +617,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -624,8 +651,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
                 numFiles 3
+                numRows 984
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 9488
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
@@ -636,17 +665,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_dynamic_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_dynamic_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_dynamic_part
@@ -654,16 +688,16 @@ STAGE PLANS:
       Processor Tree:
         TableScan
           alias: list_bucketing_dynamic_part
-          Statistics: Num rows: 16 Data size: 136 Basic stats: PARTIAL Column stats: NONE
+          Statistics: Num rows: 1000 Data size: 9624 Basic stats: COMPLETE Column stats: NONE
           GatherStats: false
           Filter Operator
             isSamplingPred: false
             predicate: ((key = '484') and (value = 'val_484')) (type: boolean)
-            Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: '484' (type: string), 'val_484' (type: string), ds (type: string), hr (type: string)
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 4 Data size: 34 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE
              ListSink
 
 PREHOOK: query: select * from list_bucketing_dynamic_part where key = '484' and value = 'val_484'
diff --git a/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out b/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out
index 7179f61..5877091 100644
--- a/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out
+++ b/ql/src/test/results/clientpositive/list_bucket_dml_9.q.java1.7.out
@@ -126,17 +126,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -177,17 +182,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -223,17 +233,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -254,17 +269,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -420,16 +440,21 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -470,17 +495,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -516,17 +546,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments 'default','default'
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.srcpart
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct srcpart { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.srcpart
@@ -556,16 +591,21 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -587,32 +627,42 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -634,32 +684,42 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileBlockMergeInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
@@ -847,17 +907,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.list_bucketing_static_part
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct list_bucketing_static_part { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe
               name: default.list_bucketing_static_part
diff --git a/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out b/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
index 1d8ec8b..1980066 100644
--- a/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
+++ b/ql/src/test/results/clientpositive/list_bucket_query_multiskew_1.q.out
@@ -190,17 +190,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
@@ -318,17 +323,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
+                rawDataSize 0
                 serialization.ddl struct fact_daily { string key, string value}
                 serialization.format 1
                 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
 #### A masked pattern was here ####
               serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
               name: default.fact_daily
@@ -439,17 +449,22 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.TextInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
               properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                 bucket_count -1
                 columns key,value
                 columns.comments
                 columns.types string:string
 #### A masked pattern was here ####
                 name default.fact_daily
+                numFiles 0
+                numRows 0
                 partition_columns ds/hr
                 partition_columns.types string:string
rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -561,17 +576,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily diff --git a/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out b/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out index 83576f1..cc8ed8b 100644 --- a/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_query_multiskew_2.q.out @@ -188,17 +188,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -308,17 +313,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -449,17 +459,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked 
pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily diff --git a/ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out b/ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out index 2e77245..c9bf7bb 100644 --- a/ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_query_multiskew_3.q.out @@ -302,17 +302,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -436,17 +441,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -581,17 +591,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -703,17 +718,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct fact_daily { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily diff --git a/ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out 
b/ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out index 1895ab0..3aa43eb 100644 --- a/ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_query_oneskew_1.q.out @@ -236,6 +236,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} EXTERNAL TRUE bucket_count -1 columns x @@ -243,11 +244,15 @@ STAGE PLANS: columns.types int #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct fact_daily { i32 x} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -348,6 +353,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} EXTERNAL TRUE bucket_count -1 columns x @@ -355,11 +361,15 @@ STAGE PLANS: columns.types int #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct fact_daily { i32 x} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -456,6 +466,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} EXTERNAL TRUE bucket_count -1 columns x @@ -463,11 +474,15 @@ STAGE PLANS: columns.types int #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct fact_daily { i32 x} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily diff --git a/ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out b/ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out index 135f5e1..92495db 100644 --- a/ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_query_oneskew_2.q.out @@ -253,6 +253,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} EXTERNAL TRUE bucket_count -1 columns x,y @@ -260,11 +261,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct fact_daily { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ 
-385,6 +390,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} EXTERNAL TRUE bucket_count -1 columns x,y @@ -392,11 +398,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct fact_daily { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -536,6 +546,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} EXTERNAL TRUE bucket_count -1 columns x,y @@ -543,11 +554,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct fact_daily { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily @@ -725,6 +740,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} EXTERNAL TRUE bucket_count -1 columns x,y @@ -732,11 +748,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct fact_daily { i32 x, string y} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily diff --git a/ql/src/test/results/clientpositive/list_bucket_query_oneskew_3.q.out b/ql/src/test/results/clientpositive/list_bucket_query_oneskew_3.q.out index 3a40305..9901c52 100644 --- a/ql/src/test/results/clientpositive/list_bucket_query_oneskew_3.q.out +++ b/ql/src/test/results/clientpositive/list_bucket_query_oneskew_3.q.out @@ -264,6 +264,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} EXTERNAL TRUE bucket_count -1 columns x,y,z @@ -271,11 +272,15 @@ STAGE PLANS: columns.types int:string:string #### A masked pattern was here #### name default.fact_daily + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct fact_daily { i32 x, string y, string z} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.fact_daily diff --git a/ql/src/test/results/clientpositive/load_dyn_part8.q.out b/ql/src/test/results/clientpositive/load_dyn_part8.q.out index 84ccb75..85dc630 100644 --- 
a/ql/src/test/results/clientpositive/load_dyn_part8.q.out +++ b/ql/src/test/results/clientpositive/load_dyn_part8.q.out @@ -233,17 +233,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -279,17 +284,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -325,17 +335,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -371,17 +386,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/louter_join_ppr.q.out b/ql/src/test/results/clientpositive/louter_join_ppr.q.out index d994b95..34aafa4 100644 --- a/ql/src/test/results/clientpositive/louter_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/louter_join_ppr.q.out @@ -232,17 +232,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat 
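Note on the `COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}` entries now appearing throughout these property dumps: the value is a small JSON document rather than a bare flag. A minimal sketch of how a consumer could test that flag, assuming the org.json library; the helper name is hypothetical and this is not the Hive implementation:

```java
// Minimal sketch, assuming org.json; the helper name is hypothetical and
// this is not the Hive implementation. It shows how the
// COLUMN_STATS_ACCURATE value seen in the hunks above could be checked.
import java.util.Map;
import org.json.JSONObject;

public class BasicStatsFlagSketch {

  static boolean basicStatsUpToDate(Map<String, String> params) {
    String acc = params.get("COLUMN_STATS_ACCURATE");
    if (acc == null) {
      return false; // property absent: stats were never recorded
    }
    try {
      // JSON format, as in the hunks above: {"BASIC_STATS":"true"}
      return "true".equalsIgnoreCase(new JSONObject(acc).optString("BASIC_STATS"));
    } catch (Exception e) {
      // tolerate a legacy bare true/false value, should one appear
      return "true".equalsIgnoreCase(acc);
    }
  }
}
```

Keeping the flag as JSON leaves room for additional per-column entries in the same property without another format change.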
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -278,17 +283,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -608,17 +618,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -654,17 +669,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -700,17 +720,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -746,17 +771,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1091,17 +1121,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1137,17 +1172,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1463,17 +1503,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1509,17 +1554,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' 
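The repeated `numFiles 0` / `totalSize 0` pairs in these srcpart hunks are what an empty partition directory would produce when quick stats are filled in from a file listing. A rough sketch of that derivation, with a hypothetical helper name and only the two size-based keys shown; this is an assumed shape, not the metastore code:

```java
// Rough sketch (hypothetical helper, not the metastore code): quick stats
// derived from a directory listing. An empty partition directory yields
// exactly the "numFiles 0" / "totalSize 0" pair seen above.
import java.util.Map;
import org.apache.hadoop.fs.FileStatus;

public class QuickStatsSketch {

  static void populate(FileStatus[] files, Map<String, String> params) {
    long totalSize = 0L;
    for (FileStatus f : files) {
      totalSize += f.getLen(); // sum of file lengths under the partition dir
    }
    params.put("numFiles", String.valueOf(files.length));
    params.put("totalSize", String.valueOf(totalSize));
  }
}
```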
columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out b/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out index a32986c..9f397e0 100644 --- a/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/mapjoin_mapjoin.q.out @@ -287,17 +287,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -333,17 +338,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -379,17 +389,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -425,17 +440,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl 
struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out b/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out index 8f5492f..65d896b 100644 --- a/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out +++ b/ql/src/test/results/clientpositive/mapjoin_memcheck.q.out @@ -44,14 +44,14 @@ STAGE PLANS: $hdt$_0:src1 TableScan alias: src1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: string) @@ -62,14 +62,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -77,10 +77,10 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/masking_1.q.out b/ql/src/test/results/clientpositive/masking_1.q.out index 3b63550..200cf8e 100644 --- a/ql/src/test/results/clientpositive/masking_1.q.out +++ b/ql/src/test/results/clientpositive/masking_1.q.out @@ -24,17 +24,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10)) (type: boolean) - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: 
string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -74,17 +74,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -121,17 +121,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 80 Data size: 320 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 80 Data size: 320 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 80 Data size: 320 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -168,17 +168,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: reverse(value) (type: string) outputColumnNames: _col0 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE File Output Operator 
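The re-estimated Statistics lines in the masking_1.q.out hunks follow a simple pattern once the scan cardinality changes from 500 to 55 rows: each equality conjunct halves the estimate and each range conjunct cuts it to a third (500/2/3 = 83 before, 55/2/3 = 9 after; with the extra `key > 0` conjunct, 55/2/3/3 = 3). A worked sketch of that heuristic; the divide-by-2 / divide-by-3 rule is inferred from the figures above, not quoted from Hive's stats annotation code:

```java
// Worked sketch of the selectivity pattern the figures above are
// consistent with; the /2 and /3 factors are an inference from the
// numbers, not a quote of Hive's stats rules.
public class FilterSelectivitySketch {

  static long estimateRows(long inputRows, int equalityConjuncts, int rangeConjuncts) {
    double rows = inputRows;
    for (int i = 0; i < equalityConjuncts; i++) {
      rows /= 2; // e.g. (key % 2) = 0
    }
    for (int i = 0; i < rangeConjuncts; i++) {
      rows /= 3; // e.g. key < 10 or key > 0
    }
    return Math.max(1L, (long) rows);
  }

  public static void main(String[] args) {
    System.out.println(estimateRows(500, 1, 1)); // 83, the old masking_1 estimate
    System.out.println(estimateRows(55, 1, 1));  // 9, the new estimate
    System.out.println(estimateRows(55, 1, 2));  // 3, with the extra key > 0 conjunct
  }
}
```

The mapjoin_memcheck.q.out hunk above fits a similarly mechanical rule: the join output looks like the larger input inflated by 10% (10 rows / 70 bytes becoming 11 / 77 before the change; 1 row / 80 bytes becoming 1 / 88 after).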
compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -215,19 +215,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10)) (type: boolean) - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(_col0) (type: double) sort order: + Map-reduce partition columns: UDFToDouble(_col0) (type: double) - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) TableScan alias: srcpart @@ -348,17 +348,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -395,17 +395,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 
Data size: 317 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/masking_3.q.out b/ql/src/test/results/clientpositive/masking_3.q.out index 1925dce..eabcb0d 100644 --- a/ql/src/test/results/clientpositive/masking_3.q.out +++ b/ql/src/test/results/clientpositive/masking_3.q.out @@ -24,19 +24,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: UDFToDouble(key) is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string), UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: double), _col3 (type: double) sort order: ++ Map-reduce partition columns: _col2 (type: double), _col3 (type: double) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) TableScan alias: src @@ -605,19 +605,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 0) and UDFToDouble(key) is not null) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string), UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: double), _col3 (type: double) sort order: ++ Map-reduce partition columns: _col2 (type: double), _col3 (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) TableScan alias: src @@ -1183,19 +1183,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 0) and UDFToDouble(key) is not null) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 484 Data size: 1936 Basic stats: COMPLETE Column stats: NONE Select Operator 
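One variant in the masking hunks jumps the other way, to `Num rows: 1453` for the same `Data size: 5812`. That is consistent with a rows-from-size fallback when only the int column is referenced: 5812 / 4 = 1453. A sketch of that fallback; the 4-byte width for a lone int column is an assumption drawn from the figures, not from Hive source:

```java
// Sketch of a rows-from-size fallback; the 4-byte row width for a lone
// int column is an assumption inferred from the figures, not Hive source.
public class RowsFromSizeSketch {

  static long estimateRows(long dataSize, long estimatedRowWidthBytes) {
    return Math.max(1L, dataSize / Math.max(1L, estimatedRowWidthBytes));
  }

  public static void main(String[] args) {
    // masking_3.q.out: Data size 5812 with only the int key referenced
    System.out.println(estimateRows(5812, 4)); // -> 1453, matching the plan
  }
}
```

The 55-row estimates elsewhere in these hunks would then correspond to a wider assumed per-row footprint for the int-plus-string schema.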
expressions: key (type: int), UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 484 Data size: 1936 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: double), _col2 (type: double) sort order: ++ Map-reduce partition columns: _col1 (type: double), _col2 (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 484 Data size: 1936 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) TableScan alias: src @@ -1761,19 +1761,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 0) and UDFToDouble(key) is not null) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string), UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: double), _col2 (type: double) sort order: ++ Map-reduce partition columns: _col1 (type: double), _col2 (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) TableScan alias: src @@ -2340,19 +2340,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) is not null and key is not null) (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string), UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: double), _col3 (type: double) sort order: ++ Map-reduce partition columns: _col2 (type: double), _col3 (type: double) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) TableScan alias: src @@ -6585,19 +6585,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 0) and UDFToDouble(key) 
is not null) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string), UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: double), _col3 (type: double) sort order: ++ Map-reduce partition columns: _col2 (type: double), _col3 (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) TableScan alias: src @@ -7163,19 +7163,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 0) and UDFToDouble(key) is not null) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string), UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: double), _col3 (type: double) sort order: ++ Map-reduce partition columns: _col2 (type: double), _col3 (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) TableScan alias: src diff --git a/ql/src/test/results/clientpositive/masking_4.q.out b/ql/src/test/results/clientpositive/masking_4.q.out index 7e923e8..318e71a 100644 --- a/ql/src/test/results/clientpositive/masking_4.q.out +++ b/ql/src/test/results/clientpositive/masking_4.q.out @@ -86,17 +86,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key = 5)) (type: boolean) - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: 5 (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE table: input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -130,17 +130,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key = 5)) (type: boolean) - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: 5 (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -174,19 +174,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: UDFToDouble(key) is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string), UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col2 (type: double), _col3 (type: double) sort order: ++ Map-reduce partition columns: _col2 (type: double), _col3 (type: double) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) TableScan alias: src diff --git a/ql/src/test/results/clientpositive/masking_5.q.out b/ql/src/test/results/clientpositive/masking_5.q.out index acb6471..692bd08 100644 --- a/ql/src/test/results/clientpositive/masking_5.q.out +++ b/ql/src/test/results/clientpositive/masking_5.q.out @@ -25,17 +25,17 @@ STAGE PLANS: TableScan alias: masking_test Row Limit Per Split: 10 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10)) (type: boolean) - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE File 
Output Operator compressed: false - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -71,17 +71,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10)) (type: boolean) - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/masking_disablecbo_1.q.out b/ql/src/test/results/clientpositive/masking_disablecbo_1.q.out index 6717527..6d2be41 100644 --- a/ql/src/test/results/clientpositive/masking_disablecbo_1.q.out +++ b/ql/src/test/results/clientpositive/masking_disablecbo_1.q.out @@ -24,17 +24,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10)) (type: boolean) - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -74,17 +74,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -121,17 +121,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 80 Data size: 320 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 80 Data size: 320 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 80 Data size: 320 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -168,17 +168,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: reverse(value) (type: string) outputColumnNames: _col0 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -215,19 +215,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and UDFToDouble(key) is not null) (type: boolean) - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(_col0) (type: double) sort order: + Map-reduce 
partition columns: UDFToDouble(_col0) (type: double) - Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int), _col1 (type: string) TableScan alias: srcpart @@ -344,17 +344,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -391,17 +391,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: boolean) - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 317 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out b/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out index 6aaab20..cebeccd 100644 --- a/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out +++ b/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out @@ -24,15 +24,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: UDFToDouble(key) is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) sort order: ++ Map-reduce partition columns: UDFToDouble(key) (type: double), 
UDFToDouble(key) (type: double) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int), value (type: string) TableScan alias: src @@ -601,15 +601,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) is not null and (key > 0)) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) sort order: ++ Map-reduce partition columns: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int), value (type: string) TableScan alias: src @@ -1175,15 +1175,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) is not null and (key > 0)) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 484 Data size: 1936 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) sort order: ++ Map-reduce partition columns: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 484 Data size: 1936 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int) TableScan alias: src @@ -1749,15 +1749,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) is not null and (key > 0)) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) sort order: ++ Map-reduce partition columns: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) TableScan alias: src @@ -2328,15 +2328,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: UDFToDouble(key) is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) sort order: ++ Map-reduce partition columns: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int), value (type: string) TableScan alias: src @@ -6565,15 +6565,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) is not null and (key > 0)) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) sort order: ++ Map-reduce partition columns: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int), value (type: string) TableScan alias: src @@ -7139,15 +7139,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) is not null and (key > 0)) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) sort order: ++ Map-reduce partition columns: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 1902 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int), value (type: string) TableScan alias: src diff --git a/ql/src/test/results/clientpositive/masking_disablecbo_4.q.out b/ql/src/test/results/clientpositive/masking_disablecbo_4.q.out index 698c797..9b1151b 100644 --- a/ql/src/test/results/clientpositive/masking_disablecbo_4.q.out +++ b/ql/src/test/results/clientpositive/masking_disablecbo_4.q.out @@ -86,17 +86,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key = 5)) (type: boolean) - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: 5 (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -130,17 +130,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((key % 2) = 0) and (key < 10) and (key = 5)) (type: boolean) - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: 5 (type: int), reverse(value) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 41 Data size: 435 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 422 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -174,15 +174,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: masking_test_subq - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: UDFToDouble(key) is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) sort order: ++ Map-reduce partition columns: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int), value (type: string) TableScan alias: src diff --git a/ql/src/test/results/clientpositive/merge3.q.out b/ql/src/test/results/clientpositive/merge3.q.out index 5b581db..3c00d88 100644 --- a/ql/src/test/results/clientpositive/merge3.q.out +++ b/ql/src/test/results/clientpositive/merge3.q.out @@ -110,18 +110,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: merge_src - Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 23248 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 1 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 2000 Data size: 21248 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 
23248 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.TextInputFormat @@ -146,7 +146,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -154,8 +153,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.merge_src numFiles 4 - numRows 2000 - rawDataSize 21248 serialization.ddl struct merge_src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -166,7 +163,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -174,8 +170,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.merge_src numFiles 4 - numRows 2000 - rawDataSize 21248 serialization.ddl struct merge_src { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2369,10 +2363,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 2000 - rawDataSize 21248 totalSize 23248 #### A masked pattern was here #### @@ -2519,17 +2510,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src_part + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part @@ -2564,17 +2560,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src_part + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part @@ -4954,17 +4955,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src_part + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct 
merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part @@ -4999,17 +5005,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.merge_src_part + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part diff --git a/ql/src/test/results/clientpositive/metadataonly1.q.out b/ql/src/test/results/clientpositive/metadataonly1.q.out index d6d86bb..338292a 100644 --- a/ql/src/test/results/clientpositive/metadataonly1.q.out +++ b/ql/src/test/results/clientpositive/metadataonly1.q.out @@ -187,17 +187,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -330,17 +335,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -473,17 +483,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -654,17 +669,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -693,17 +713,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -817,17 +842,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -857,17 +887,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -1112,17 +1147,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1152,17 +1192,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1192,17 +1237,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1352,17 +1402,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1393,17 +1448,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1434,17 +1494,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1583,17 +1648,22 @@ STAGE 
PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -1622,17 +1692,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test1 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test1 @@ -1831,17 +1906,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1871,17 +1951,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1911,17 +1996,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.test2 @@ -1951,17 +2041,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 @@ -1991,17 +2086,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns a,b columns.comments columns.types int:double #### A masked pattern was here #### name default.test2 + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct test2 { i32 a, double b} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test2 diff --git a/ql/src/test/results/clientpositive/multi_insert_lateral_view.q.out b/ql/src/test/results/clientpositive/multi_insert_lateral_view.q.out index 7964405..2113f7a 100644 --- a/ql/src/test/results/clientpositive/multi_insert_lateral_view.q.out +++ b/ql/src/test/results/clientpositive/multi_insert_lateral_view.q.out @@ -81,23 +81,23 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col5 (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -106,41 +106,41 @@ STAGE PLANS: Select Operator expressions: array((key + 1),(key + 2)) (type: array<double>) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats:
COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col5 (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.src_lv1 Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col5 (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -149,20 +149,20 @@ STAGE PLANS: Select Operator expressions: array((key + 3),(key + 4)) (type: array<double>) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col5 (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228
Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -381,65 +381,65 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col5) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double) Select Operator expressions: array((key + 1),(key + 2)) (type: array<double>) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col5) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double) Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num
rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col5) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -449,19 +449,19 @@ STAGE PLANS: Select Operator expressions: array((key + 3),(key + 4)) (type: array<double>) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col5) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -474,10 +474,10 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -505,7 +505,7 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double) Reduce Operator Tree: Group By Operator @@ -513,10 +513,10 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -628,53 +628,53 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames:
key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col5) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double) Select Operator expressions: array((key + 1),(key + 2)) (type: array<double>) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col5) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: double) Filter Operator predicate: ((key < 200) or (key > 200)) (type: boolean) - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -687,10 +687,10 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -718,23 +718,23 @@ STAGE PLANS: key expressions: key (type: string) sort order: + Map-reduce partition columns: key (type: string) - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reduce Operator
Tree: Forward - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (KEY._col0 > 200) (type: boolean) - Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(VALUE._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -742,16 +742,16 @@ STAGE PLANS: name: default.src_lv2 Filter Operator predicate: (KEY._col0 < 200) (type: boolean) - Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(VALUE._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -895,75 +895,75 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col5 (type: double), _col0 (type: string) outputColumnNames: _col5, _col0 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col0) keys: _col5 (type: double), _col0 (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 
(type: double) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: array((key + 1),(key + 2)) (type: array<double>) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col5 (type: double), _col0 (type: string) outputColumnNames: _col5, _col0 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col0) keys: _col5 (type: double), _col0 (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col5 (type: double), _col0 (type: string) outputColumnNames: _col5, _col0 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col0) keys: _col5 (type: double), _col0 (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -973,23 +973,23 @@ STAGE PLANS: Select Operator expressions: array((key + 3),(key + 4)) (type: array<double>) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View
Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col5 (type: double), _col0 (type: string) outputColumnNames: _col5, _col0 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col0) keys: _col5 (type: double), _col0 (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -999,13 +999,13 @@ STAGE PLANS: Select Operator expressions: value (type: string), key (type: string) outputColumnNames: value, key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT key) keys: value (type: string), key (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -1018,10 +1018,10 @@ STAGE PLANS: keys: KEY._col0 (type: double) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1049,17 +1049,17 @@ STAGE PLANS: key expressions: _col0 (type: double), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator aggregations: sum(DISTINCT KEY._col1:0._col0) keys: KEY._col0 (type: double) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1087,17 +1087,17 @@ STAGE PLANS: key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator aggregations: sum(DISTINCT KEY._col1:0._col0) keys: KEY._col0 
(type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1258,63 +1258,63 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col5) keys: _col0 (type: string), _col5 (type: double) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: array((key + 1),(key + 2)) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col5) keys: _col0 (type: string), _col5 (type: double) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic 
stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col5) keys: _col0 (type: string), _col5 (type: double) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -1324,19 +1324,19 @@ STAGE PLANS: Select Operator expressions: array((key + 3),(key + 4)) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col5) keys: _col0 (type: string), _col5 (type: double) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -1345,7 +1345,7 @@ STAGE PLANS: serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe Filter Operator predicate: ((key < 200) or (key > 200)) (type: boolean) - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -1358,10 +1358,10 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1389,17 +1389,17 @@ STAGE PLANS: key expressions: _col0 (type: string), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator aggregations: sum(DISTINCT KEY._col1:0._col0) keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 
Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1427,22 +1427,22 @@ STAGE PLANS: key expressions: value (type: string), key (type: string) sort order: ++ Map-reduce partition columns: value (type: string) - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Forward - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (KEY._col1:0._col0 > 200) (type: boolean) - Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT KEY._col1:0._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1450,16 +1450,16 @@ STAGE PLANS: name: default.src_lv3 Filter Operator predicate: (KEY._col1:0._col0 < 200) (type: boolean) - Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT KEY._col1:0._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/multi_insert_union_src.q.out b/ql/src/test/results/clientpositive/multi_insert_union_src.q.out index 2036e63..9764e5f 100644 --- a/ql/src/test/results/clientpositive/multi_insert_union_src.q.out +++ b/ql/src/test/results/clientpositive/multi_insert_union_src.q.out @@ -71,18 +71,18 @@ STAGE PLANS: outputColumnNames: _col0, _col1 Statistics: Num rows: 8 Data size: 61 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 174 Data size: 1824 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1864 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col0 < 150) (type: boolean) - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: 
COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Filter Operator predicate: (_col0 > 400) (type: boolean) - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -91,27 +91,27 @@ STAGE PLANS: serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe TableScan alias: src2 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 100) (type: boolean) - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 1803 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 174 Data size: 1824 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1864 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col0 < 150) (type: boolean) - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Filter Operator predicate: (_col0 > 400) (type: boolean) - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -122,10 +122,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -152,16 +152,16 @@ STAGE PLANS: Reduce Output Operator key expressions: _col1 (type: string) sort order: + - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic 
stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 58 Data size: 608 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 548 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/multi_join_union.q.out b/ql/src/test/results/clientpositive/multi_join_union.q.out index b361a1a..ad009d4 100644 --- a/ql/src/test/results/clientpositive/multi_join_union.q.out +++ b/ql/src/test/results/clientpositive/multi_join_union.q.out @@ -82,14 +82,14 @@ STAGE PLANS: $hdt$_0:a TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: string) @@ -97,16 +97,16 @@ STAGE PLANS: $hdt$_2-subquery1:$hdt$_2-subquery1:src13 TableScan alias: src13 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col3 (type: string) @@ -114,16 +114,16 @@ STAGE PLANS: $hdt$_2-subquery2:$hdt$_2-subquery2:src14 TableScan alias: src14 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col3 (type: string) @@ -134,14 +134,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 500 Data 
size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -149,7 +149,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -157,10 +157,10 @@ STAGE PLANS: 0 _col3 (type: string) 1 _col1 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/nullformat.q.out b/ql/src/test/results/clientpositive/nullformat.q.out index af91470..2a80359 100644 --- a/ql/src/test/results/clientpositive/nullformat.q.out +++ b/ql/src/test/results/clientpositive/nullformat.q.out @@ -93,6 +93,11 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: -- load null data from another table and verify that the null is stored in the expected format INSERT OVERWRITE TABLE null_tab1 SELECT a,b FROM base_tab diff --git a/ql/src/test/results/clientpositive/nullformatCTAS.q.out b/ql/src/test/results/clientpositive/nullformatCTAS.q.out index 7686419..2219009 100644 --- a/ql/src/test/results/clientpositive/nullformatCTAS.q.out +++ b/ql/src/test/results/clientpositive/nullformatCTAS.q.out @@ -176,10 +176,7 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( - 'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', 'numFiles'='1', - 'numRows'='10', - 'rawDataSize'='70', 'totalSize'='80', #### A masked pattern was here #### 1.01 diff --git a/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out b/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out index 0cd5c82..a0b0cfe 100644 --- a/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out +++ b/ql/src/test/results/clientpositive/offset_limit_global_optimizer.q.out @@ -116,17 +116,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns 
key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -284,17 +289,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -330,17 +340,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -376,17 +391,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -422,17 +442,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -593,17 +618,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -639,17 +669,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -685,17 +720,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -731,17 +771,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -912,17 +957,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns 
ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -958,17 +1008,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1004,17 +1059,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1050,17 +1110,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1811,17 +1876,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1979,17 +2049,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2025,17 +2100,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2071,17 +2151,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2117,17 +2202,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2288,17 +2378,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2334,17 +2429,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2380,17 +2480,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2426,17 +2531,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2607,17 +2717,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2653,17 +2768,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' 
columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2699,17 +2819,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2745,17 +2870,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/optimize_nullscan.q.out index 023b71b..b92fbbc 100644 --- a/ql/src/test/results/clientpositive/optimize_nullscan.q.out +++ b/ql/src/test/results/clientpositive/optimize_nullscan.q.out @@ -376,17 +376,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -421,17 +426,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 
serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -466,17 +476,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -511,17 +526,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1033,17 +1053,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1079,17 +1104,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1125,17 +1155,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1171,17 +1206,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1360,17 +1400,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1405,17 +1450,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1450,17 +1500,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1495,17 +1550,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/orc_create.q.out b/ql/src/test/results/clientpositive/orc_create.q.out index 3013fda..9dfffac 100644 --- a/ql/src/test/results/clientpositive/orc_create.q.out +++ b/ql/src/test/results/clientpositive/orc_create.q.out @@ -70,6 +70,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -120,6 +125,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -173,6 +183,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -216,6 +231,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -269,6 +290,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -319,6 +345,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/orc_createas1.q.out b/ql/src/test/results/clientpositive/orc_createas1.q.out index 506f39d..dbd7548 100644 --- a/ql/src/test/results/clientpositive/orc_createas1.q.out +++ b/ql/src/test/results/clientpositive/orc_createas1.q.out @@ -160,28 +160,28 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_createas1b - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 5 - Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1000 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 880 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 1000 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/orc_llap.q.out b/ql/src/test/results/clientpositive/orc_llap.q.out index 6fc73b7..bae69bb 100644 --- a/ql/src/test/results/clientpositive/orc_llap.q.out +++ b/ql/src/test/results/clientpositive/orc_llap.q.out @@ -719,17 +719,17 @@ STAGE PLANS: TableScan alias: orc_llap filterExpr: ((cint > 10) and cbigint is not null) (type: boolean) - Statistics: Num rows: 98779 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((cint > 10) and cbigint is not null) (type: boolean) - Statistics: Num rows: 32926 Data size: 526817 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cint (type: int), csmallint (type: smallint), cbigint (type: bigint) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 32926 Data size: 526817 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 32926 Data size: 526817 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -792,17 +792,17 @@ STAGE PLANS: TableScan alias: orc_llap filterExpr: ((cint > 10) and cbigint is not null) (type: boolean) - Statistics: Num rows: 4938 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((cint > 10) and cbigint is not null) (type: boolean) - Statistics: Num rows: 1646 Data size: 526823 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctinyint (type: tinyint), csmallint (type: smallint), cint (type: int), cbigint (type: bigint), cfloat (type: float), cdouble (type: double), cstring1 (type: string), cstring2 (type: string), ctimestamp1 (type: 
timestamp), ctimestamp2 (type: timestamp), cboolean1 (type: boolean), cboolean2 (type: boolean) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11 - Statistics: Num rows: 1646 Data size: 526823 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1646 Data size: 526823 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 81920 Data size: 19386626 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -874,17 +874,17 @@ STAGE PLANS: TableScan alias: orc_llap filterExpr: ((cint > 5) and (cint < 10)) (type: boolean) - Statistics: Num rows: 15196 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((cint > 5) and (cint < 10)) (type: boolean) - Statistics: Num rows: 1688 Data size: 175561 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27306 Data size: 6462051 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cstring2 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1688 Data size: 175561 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27306 Data size: 6462051 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1688 Data size: 175561 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27306 Data size: 6462051 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -944,22 +944,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_llap - Statistics: Num rows: 7902 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cstring1 (type: string), cstring2 (type: string) outputColumnNames: cstring1, cstring2 - Statistics: Num rows: 7902 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() keys: cstring1 (type: string), cstring2 (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 7902 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 7902 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: bigint) Execution mode: vectorized LLAP IO: all inputs @@ -969,10 +969,10 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 3951 Data size: 790234 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 122880 Data size: 29079940 Basic stats: COMPLETE Column stats: 
NONE File Output Operator compressed: false - Statistics: Num rows: 3951 Data size: 790234 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 122880 Data size: 29079940 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1039,14 +1039,14 @@ STAGE PLANS: TableScan alias: o1 filterExpr: (csmallint is not null and cbigint is not null) (type: boolean) - Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (csmallint is not null and cbigint is not null) (type: boolean) - Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: csmallint (type: smallint), cstring1 (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: smallint) @@ -1058,14 +1058,14 @@ STAGE PLANS: TableScan alias: o1 filterExpr: (csmallint is not null and cbigint is not null) (type: boolean) - Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (csmallint is not null and cbigint is not null) (type: boolean) - Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: csmallint (type: smallint), cstring2 (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 14111 Data size: 1580469 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 245760 Data size: 58159880 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -1073,14 +1073,14 @@ STAGE PLANS: 0 _col0 (type: smallint) 1 _col0 (type: smallint) outputColumnNames: _col2, _col5 - Statistics: Num rows: 15522 Data size: 1738515 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 270336 Data size: 63975869 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col2 (type: string), _col5 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 15522 Data size: 1738515 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 270336 Data size: 63975869 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 15522 Data size: 1738515 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 270336 Data size: 63975869 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out b/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out index 7b361b7..38321e9 100644 --- a/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out +++ b/ql/src/test/results/clientpositive/orc_predicate_pushdown.q.out @@ -135,11 +135,11 @@ STAGE PLANS: Map Operator 
Tree: TableScan alias: orc_pred - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hash(t) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) mode: hash @@ -183,11 +183,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hash(t) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) mode: hash @@ -311,14 +311,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean) - Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hash(t) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) mode: hash @@ -369,14 +369,14 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean) - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean) - Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hash(t) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) mode: hash @@ -458,17 +458,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t = -1) and s is not null and (s like 'bob%')) (type: boolean) - Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: -1 (type: tinyint), s (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - 
Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -501,17 +501,17 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: ((t = -1) and s is not null and (s like 'bob%')) (type: boolean) - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t = -1) and s is not null and (s like 'bob%')) (type: boolean) - Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: -1 (type: tinyint), s (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 262 Data size: 77718 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 6037 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -589,26 +589,26 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean) - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), s (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: tinyint), _col1 (type: string) sort order: ++ - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -645,26 +645,26 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean) - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: 
Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean) - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), s (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: tinyint), _col1 (type: string) sort order: ++ - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 131 Data size: 38859 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 3018 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -766,31 +766,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean) - Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 
Data size: 116 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -835,31 +835,31 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean) - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean) - Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 5 Data size: 1483 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 888 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -970,28 +970,28 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t > 10) and (t <> 101) and (d >= 10.0) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean) - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -1006,20 +1006,20 @@ STAGE PLANS: Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1069,28 +1069,28 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: ((t > 10) and (t <> 101) and (d >= 10.0) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean) - Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t > 10) and (t <> 101) and (d >= 10.0) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean) - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: 
double) Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -1105,20 +1105,20 @@ STAGE PLANS: Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.7.out b/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.7.out index c471301..2c5639b 100644 --- a/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.7.out +++ b/ql/src/test/results/clientpositive/outer_join_ppr.q.java1.7.out @@ -226,17 +226,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -272,17 +277,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -318,17 +328,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -364,17 +379,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -705,17 +725,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -751,17 +776,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/parquet_array_null_element.q.out b/ql/src/test/results/clientpositive/parquet_array_null_element.q.out index 387f01e..75d2d27 100644 --- a/ql/src/test/results/clientpositive/parquet_array_null_element.q.out +++ 
b/ql/src/test/results/clientpositive/parquet_array_null_element.q.out @@ -70,6 +70,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/parquet_create.q.out b/ql/src/test/results/clientpositive/parquet_create.q.out index c6d33ff..cc4b735 100644 --- a/ql/src/test/results/clientpositive/parquet_create.q.out +++ b/ql/src/test/results/clientpositive/parquet_create.q.out @@ -73,6 +73,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/parquet_join.q.out b/ql/src/test/results/clientpositive/parquet_join.q.out index 86fb64a..ac5bea6 100644 --- a/ql/src/test/results/clientpositive/parquet_join.q.out +++ b/ql/src/test/results/clientpositive/parquet_join.q.out @@ -83,34 +83,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: p1 - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 345 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 345 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 345 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 345 Basic stats: COMPLETE Column stats: NONE TableScan alias: p2 - Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 482 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 482 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), myvalue (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 482 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 482 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -120,14 +120,14 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col0 (type: int) outputColumnNames: _col2 - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 379 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col2 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 4 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 379 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 379 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -183,14 +183,14 @@ STAGE PLANS: $hdt$_0:p1 TableScan alias: p1 - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 345 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 345 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 345 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: int) @@ -201,14 +201,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: p2 - Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 482 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 482 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), myvalue (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 482 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -216,14 +216,14 @@ STAGE PLANS: 0 _col0 (type: int) 1 _col0 (type: int) outputColumnNames: _col2 - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 379 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col2 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 379 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 4 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 379 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out b/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out index d6affd6..782df2e 100644 --- a/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out +++ b/ql/src/test/results/clientpositive/parquet_mixed_partition_formats.q.out @@ -126,6 +126,8 @@ Table: parquet_mixed_partition_formats #### A masked pattern was here #### Partition Parameters: numFiles 1 + numRows 0 + rawDataSize 0 totalSize 2521 #### A masked pattern was here #### @@ -197,6 +199,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A 
masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -247,6 +255,8 @@ Table: parquet_mixed_partition_formats #### A masked pattern was here #### Partition Parameters: numFiles 1 + numRows 0 + rawDataSize 0 totalSize 2521 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/parquet_partitioned.q.out b/ql/src/test/results/clientpositive/parquet_partitioned.q.out index 3529d70..75f843f 100644 --- a/ql/src/test/results/clientpositive/parquet_partitioned.q.out +++ b/ql/src/test/results/clientpositive/parquet_partitioned.q.out @@ -67,6 +67,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/parquet_serde.q.out b/ql/src/test/results/clientpositive/parquet_serde.q.out index c1e594a..dcb5599 100644 --- a/ql/src/test/results/clientpositive/parquet_serde.q.out +++ b/ql/src/test/results/clientpositive/parquet_serde.q.out @@ -72,6 +72,8 @@ Table: parquet_mixed_fileformat #### A masked pattern was here #### Partition Parameters: numFiles 1 + numRows 0 + rawDataSize 0 totalSize 36 #### A masked pattern was here #### @@ -136,6 +138,12 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -173,6 +181,8 @@ Table: parquet_mixed_fileformat #### A masked pattern was here #### Partition Parameters: numFiles 1 + numRows 0 + rawDataSize 0 totalSize 36 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out b/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out index 106448a..0be588a 100644 --- a/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out +++ b/ql/src/test/results/clientpositive/part_inherit_tbl_props_with_star.q.out @@ -38,9 +38,14 @@ Database: default Table: mytbl #### A masked pattern was here #### Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} a myval b yourval c noval + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/pcr.q.out b/ql/src/test/results/clientpositive/pcr.q.out index 684d4d7..43e9f0f 100644 --- a/ql/src/test/results/clientpositive/pcr.q.out +++ b/ql/src/test/results/clientpositive/pcr.q.out @@ -155,17 +155,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -200,17 +205,22 @@ STAGE 
PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -375,17 +385,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -420,17 +435,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -465,17 +485,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -690,17 +715,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -735,17 +765,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -929,17 +964,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -974,17 +1014,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1170,17 +1215,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1215,17 +1265,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1260,17 +1315,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1467,17 +1527,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1512,17 +1577,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1557,17 +1627,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1763,17 +1838,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 
serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1808,17 +1888,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1972,17 +2057,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2017,17 +2107,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2221,17 +2316,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2266,17 +2366,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + 
numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2311,17 +2416,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2559,17 +2669,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2604,17 +2719,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2810,17 +2930,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3116,17 +3241,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value 
columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3161,17 +3291,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3465,17 +3600,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3510,17 +3650,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3555,17 +3700,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3600,17 +3750,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -3833,17 +3988,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -3878,17 +4038,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -3923,17 +4088,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -4141,15 +4311,20 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t2
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t2 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t2
@@ -4171,15 +4346,20 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t3
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t3 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t3
@@ -4219,17 +4399,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -4255,15 +4440,20 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t2
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t2 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t2
@@ -4286,15 +4476,20 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t2
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t2 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t2
@@ -4310,30 +4505,40 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t2
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t2 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t2
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t2 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t2
@@ -4355,15 +4560,20 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t2
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t2 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t2
@@ -4379,30 +4589,40 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t2
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t2 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t2
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t2 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t2
@@ -4434,15 +4654,20 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t3
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t3 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t3
@@ -4465,15 +4690,20 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t3
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t3 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t3
@@ -4489,30 +4719,40 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t3
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t3 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t3
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t3 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t3
@@ -4534,15 +4774,20 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t3
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t3 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t3
@@ -4558,30 +4803,40 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t3
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t3 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe

    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t3
+     numFiles 0
+     numRows 0
+     rawDataSize 0
      serialization.ddl struct pcr_t3 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t3
@@ -4814,17 +5069,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -5381,17 +5641,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -5569,17 +5834,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -5615,17 +5885,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -5797,17 +6072,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -5843,17 +6123,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/pcs.q.out b/ql/src/test/results/clientpositive/pcs.q.out
index d6d2431..83d962c 100644
--- a/ql/src/test/results/clientpositive/pcs.q.out
+++ b/ql/src/test/results/clientpositive/pcs.q.out
@@ -198,17 +198,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -243,17 +248,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -422,17 +432,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -467,17 +482,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -620,17 +640,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -665,17 +690,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -845,17 +875,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -890,17 +925,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -1083,17 +1123,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -1128,17 +1173,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -1570,17 +1620,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -1748,17 +1803,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -1793,17 +1853,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -1899,17 +1964,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -1942,17 +2012,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -1985,17 +2060,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -2125,17 +2205,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -2168,17 +2253,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
@@ -2211,17 +2301,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcs_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcs_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcs_t1
diff --git a/ql/src/test/results/clientpositive/pointlookup2.q.out b/ql/src/test/results/clientpositive/pointlookup2.q.out
index 6fc6e7f..cacc087 100644
--- a/ql/src/test/results/clientpositive/pointlookup2.q.out
+++ b/ql/src/test/results/clientpositive/pointlookup2.q.out
@@ -214,17 +214,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -259,17 +264,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -460,17 +470,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -726,17 +741,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -771,17 +791,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -1048,17 +1073,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -1093,17 +1123,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -1423,17 +1458,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -1468,17 +1508,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -1513,17 +1558,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
diff --git a/ql/src/test/results/clientpositive/pointlookup3.q.out b/ql/src/test/results/clientpositive/pointlookup3.q.out
index 2b25b39..abcc300 100644
--- a/ql/src/test/results/clientpositive/pointlookup3.q.out
+++ b/ql/src/test/results/clientpositive/pointlookup3.q.out
@@ -176,17 +176,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -222,17 +227,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -421,17 +431,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -622,17 +637,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -889,17 +909,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -935,17 +960,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -1213,17 +1243,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -1259,17 +1294,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -1305,17 +1345,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
diff --git a/ql/src/test/results/clientpositive/pointlookup4.q.out b/ql/src/test/results/clientpositive/pointlookup4.q.out
index 6236272..e69ac7c 100644
--- a/ql/src/test/results/clientpositive/pointlookup4.q.out
+++ b/ql/src/test/results/clientpositive/pointlookup4.q.out
@@ -186,17 +186,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -232,17 +237,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -431,17 +441,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
@@ -477,17 +492,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types int:string
 #### A masked pattern was here ####
      name default.pcr_t1
+     numFiles 0
+     numRows 0
      partition_columns ds1/ds2
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct pcr_t1 { i32 key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.pcr_t1
diff --git a/ql/src/test/results/clientpositive/ppd_union_view.q.out b/ql/src/test/results/clientpositive/ppd_union_view.q.out
index 36bb214..b40f3b5 100644
--- a/ql/src/test/results/clientpositive/ppd_union_view.q.out
+++ b/ql/src/test/results/clientpositive/ppd_union_view.q.out
@@ -243,17 +243,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,keymap
      columns.comments
      columns.types string:string
 #### A masked pattern was here ####
      name default.t1_mapping
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct t1_mapping { string key, string keymap}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.t1_mapping
@@ -288,17 +293,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns keymap,value
      columns.comments
      columns.types string:string
 #### A masked pattern was here ####
      name default.t1_old
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct t1_old { string keymap, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.t1_old
@@ -713,17 +723,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments
      columns.types string:string
 #### A masked pattern was here ####
      name default.t1_new
+     numFiles 0
+     numRows 0
      partition_columns ds
      partition_columns.types string
+     rawDataSize 0
      serialization.ddl struct t1_new { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.t1_new
diff --git a/ql/src/test/results/clientpositive/ppd_vc.q.out b/ql/src/test/results/clientpositive/ppd_vc.q.out
index cc25e80..efb0d0d 100644
--- a/ql/src/test/results/clientpositive/ppd_vc.q.out
+++ b/ql/src/test/results/clientpositive/ppd_vc.q.out
@@ -105,17 +105,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -151,17 +156,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -197,17 +207,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -243,17 +258,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -531,17 +551,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -577,17 +602,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -623,17 +653,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -669,17 +704,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out b/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
index b1f280f..5af9a70 100644
--- a/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
+++ b/ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out
@@ -136,17 +136,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -182,17 +187,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -369,17 +379,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -415,17 +430,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -461,17 +481,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns ds/hr
      partition_columns.types string:string
+     rawDataSize 0
      serialization.ddl struct srcpart { string key, string value}
      serialization.format 1
      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+     totalSize 0
 #### A masked pattern was here ####
    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
    name: default.srcpart
@@ -507,17 +532,22 @@ STAGE PLANS:
    input format: org.apache.hadoop.mapred.TextInputFormat
    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
    properties:
+     COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
      bucket_count -1
      columns key,value
      columns.comments 'default','default'
      columns.types string:string
 #### A masked pattern was here ####
      name default.srcpart
+     numFiles 0
+     numRows 0
      partition_columns
ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/push_or.q.out b/ql/src/test/results/clientpositive/push_or.q.out index bc5c930..4a9095d 100644 --- a/ql/src/test/results/clientpositive/push_or.q.out +++ b/ql/src/test/results/clientpositive/push_or.q.out @@ -137,17 +137,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.push_or + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct push_or { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.push_or @@ -182,17 +187,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.push_or + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct push_or { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.push_or diff --git a/ql/src/test/results/clientpositive/query_result_fileformat.q.out b/ql/src/test/results/clientpositive/query_result_fileformat.q.out index bce3e22..850b6f6 100644 --- a/ql/src/test/results/clientpositive/query_result_fileformat.q.out +++ b/ql/src/test/results/clientpositive/query_result_fileformat.q.out @@ -53,17 +53,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: nzhang_test1 - Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 'key1') (type: boolean) - Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: 'key1' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -124,17 +124,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: nzhang_test1 - Statistics: Num 
rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 'key1') (type: boolean) - Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: 'key1' (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 25 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out index 4559b39..648ce42 100644 --- a/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out +++ b/ql/src/test/results/clientpositive/rand_partitionpruner2.q.out @@ -88,15 +88,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable @@ -137,17 +142,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -183,17 +193,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -220,15 
+235,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable @@ -251,15 +271,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable @@ -275,30 +300,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable @@ -320,15 +355,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.tmptable @@ -344,30 +384,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,hr,ds columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.tmptable + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct tmptable { string key, string value, string hr, string ds} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.tmptable diff --git a/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out b/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out index 9ee3462..2af0844 100644 --- a/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out +++ b/ql/src/test/results/clientpositive/rand_partitionpruner3.q.out @@ -93,17 +93,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -225,17 +230,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/rcfile_default_format.q.out b/ql/src/test/results/clientpositive/rcfile_default_format.q.out index c961231..abc821b 100644 --- a/ql/src/test/results/clientpositive/rcfile_default_format.q.out +++ 
b/ql/src/test/results/clientpositive/rcfile_default_format.q.out
@@ -23,6 +23,11 @@ Retention:	0
 #### A masked pattern was here ####
 Table Type:	MANAGED_TABLE
 Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles	0
+	numRows	0
+	rawDataSize	0
+	totalSize	0
 #### A masked pattern was here ####

 # Storage Information
@@ -65,10 +70,7 @@ Retention:	0
 #### A masked pattern was here ####
 Table Type:	MANAGED_TABLE
 Table Parameters:
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	1
-	numRows	500
-	rawDataSize	4812
 	totalSize	5293
 #### A masked pattern was here ####

@@ -163,10 +165,7 @@ Retention:	0
 #### A masked pattern was here ####
 Table Type:	MANAGED_TABLE
 Table Parameters:
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	1
-	numRows	500
-	rawDataSize	5312
 	totalSize	5812
 #### A masked pattern was here ####

@@ -210,10 +209,7 @@ Retention:	0
 #### A masked pattern was here ####
 Table Type:	MANAGED_TABLE
 Table Parameters:
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	1
-	numRows	500
-	rawDataSize	4812
 	totalSize	5293
 #### A masked pattern was here ####

@@ -252,6 +248,11 @@ Retention:	0
 #### A masked pattern was here ####
 Table Type:	MANAGED_TABLE
 Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles	0
+	numRows	0
+	rawDataSize	0
+	totalSize	0
 #### A masked pattern was here ####

 # Storage Information
@@ -294,10 +295,7 @@ Retention:	0
 #### A masked pattern was here ####
 Table Type:	MANAGED_TABLE
 Table Parameters:
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	1
-	numRows	500
-	rawDataSize	4812
 	totalSize	5293
 #### A masked pattern was here ####

@@ -336,6 +334,11 @@ Retention:	0
 #### A masked pattern was here ####
 Table Type:	MANAGED_TABLE
 Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles	0
+	numRows	0
+	rawDataSize	0
+	totalSize	0
 #### A masked pattern was here ####

 # Storage Information
diff --git a/ql/src/test/results/clientpositive/repl_2_exim_basic.q.out b/ql/src/test/results/clientpositive/repl_2_exim_basic.q.out
index b2cbea9..8a53330 100644
--- a/ql/src/test/results/clientpositive/repl_2_exim_basic.q.out
+++ b/ql/src/test/results/clientpositive/repl_2_exim_basic.q.out
@@ -197,6 +197,11 @@ OUTPUTFORMAT
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}',
+  'numFiles'='0',
+  'numRows'='0',
+  'rawDataSize'='0',
+  'totalSize'='0',
 #### A masked pattern was here ####
 PREHOOK: query: select * from managed_t_imported
 PREHOOK: type: QUERY
@@ -279,7 +284,11 @@ OUTPUTFORMAT
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'numFiles'='0',
+  'numRows'='0',
+  'rawDataSize'='0',
   'repl.last.id'='0',
+  'totalSize'='0',
 #### A masked pattern was here ####
 PREHOOK: query: select * from managed_t_r_imported
 PREHOOK: type: QUERY
@@ -361,6 +370,11 @@ OUTPUTFORMAT
 LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
+  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}',
+  'numFiles'='0',
+  'numRows'='0',
+  'rawDataSize'='0',
+  'totalSize'='0',
 #### A masked pattern was here ####
 PREHOOK: query: select * from ext_t_imported
 PREHOOK: type: QUERY
@@ -446,7 +460,11 @@ LOCATION
 #### A masked pattern was here ####
 TBLPROPERTIES (
   'EXTERNAL'='FALSE',
+  'numFiles'='0',
+  'numRows'='0',
+  'rawDataSize'='0',
   'repl.last.id'='0',
+  'totalSize'='0',
 #### A masked pattern was here ####
 PREHOOK: query: select * from ext_t_r_imported
 PREHOOK: type: QUERY
diff --git
a/ql/src/test/results/clientpositive/repl_3_exim_metadata.q.out b/ql/src/test/results/clientpositive/repl_3_exim_metadata.q.out index 8387c02..26b0aa6 100644 --- a/ql/src/test/results/clientpositive/repl_3_exim_metadata.q.out +++ b/ql/src/test/results/clientpositive/repl_3_exim_metadata.q.out @@ -117,7 +117,11 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', 'repl.last.id'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: select * from repldst PREHOOK: type: QUERY @@ -194,7 +198,11 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', 'repl.last.id'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: select * from repldst_md PREHOOK: type: QUERY diff --git a/ql/src/test/results/clientpositive/router_join_ppr.q.out b/ql/src/test/results/clientpositive/router_join_ppr.q.out index f149058..029bb41 100644 --- a/ql/src/test/results/clientpositive/router_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/router_join_ppr.q.out @@ -232,17 +232,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -278,17 +283,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -324,17 +334,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -370,17 +385,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -711,17 +731,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -757,17 +782,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1091,17 +1121,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1137,17 +1172,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1467,17 +1507,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1513,17 +1558,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/sample1.q.out b/ql/src/test/results/clientpositive/sample1.q.out index caf62f4..f732aeb 100644 --- a/ql/src/test/results/clientpositive/sample1.q.out +++ b/ql/src/test/results/clientpositive/sample1.q.out @@ -94,15 +94,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,dt,hr columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -143,17 +148,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -179,15 +189,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,dt,hr columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -210,15 +225,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,dt,hr columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -234,30 +254,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,dt,hr columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,dt,hr columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -279,15 +309,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,dt,hr columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -303,30 +338,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,dt,hr columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value,dt,hr columns.comments columns.types int:string:string:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/sample10.q.out b/ql/src/test/results/clientpositive/sample10.q.out index bf0cb59..5d0a2e7 100644 --- a/ql/src/test/results/clientpositive/sample10.q.out +++ b/ql/src/test/results/clientpositive/sample10.q.out @@ -154,6 +154,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -161,11 +162,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.srcpartbucket + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket @@ -202,6 +207,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -209,11 +215,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.srcpartbucket + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket @@ -250,6 +260,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -257,11 +268,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.srcpartbucket + numFiles 0 + numRows 0 
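numFiles and totalSize in these blocks are quick stats, derivable purely from the files under a table or partition location, which is why they read 0 for the freshly created srcpartbucket partitions. A hedged sketch of that derivation follows, using the Hadoop FileStatus API; the class and method names are hypothetical, not this patch's implementation.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.fs.FileStatus;

// Illustrative only: compute the two file-derived quick stats. For an
// empty partition directory both values are 0, matching the output above.
public final class QuickStatsSketch {
  public static Map<String, String> quickStats(FileStatus[] files) {
    long numFiles = 0L;
    long totalSize = 0L;
    for (FileStatus status : files) {
      if (!status.isDirectory()) {
        numFiles++;                  // count data files only
        totalSize += status.getLen(); // sum their byte lengths
      }
    }
    Map<String, String> params = new HashMap<>();
    params.put("numFiles", String.valueOf(numFiles));
    params.put("totalSize", String.valueOf(totalSize));
    return params;
  }
}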
partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket @@ -298,6 +313,7 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 4 bucket_field_name key columns key,value @@ -305,11 +321,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.srcpartbucket + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket diff --git a/ql/src/test/results/clientpositive/sample2.q.out b/ql/src/test/results/clientpositive/sample2.q.out index e70edd7..06415a2 100644 --- a/ql/src/test/results/clientpositive/sample2.q.out +++ b/ql/src/test/results/clientpositive/sample2.q.out @@ -79,15 +79,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -164,15 +169,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -195,15 +205,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -219,30 +234,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -264,15 +289,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -288,30 +318,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/sample4.q.out b/ql/src/test/results/clientpositive/sample4.q.out index 90dfc26..e4dc9c2 100644 --- a/ql/src/test/results/clientpositive/sample4.q.out +++ b/ql/src/test/results/clientpositive/sample4.q.out @@ -81,15 +81,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -166,15 +171,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -197,15 +207,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -221,30 +236,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -266,15 +291,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl 
struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -290,30 +320,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/sample5.q.out b/ql/src/test/results/clientpositive/sample5.q.out index 04aa8cc..ee43304 100644 --- a/ql/src/test/results/clientpositive/sample5.q.out +++ b/ql/src/test/results/clientpositive/sample5.q.out @@ -82,15 +82,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -167,15 +172,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -198,15 +208,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl 
   struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -222,30 +237,40 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -267,15 +292,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -291,30 +321,40 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
diff --git a/ql/src/test/results/clientpositive/sample6.q.out b/ql/src/test/results/clientpositive/sample6.q.out
index c2be9d6..d44f8e8 100644
--- a/ql/src/test/results/clientpositive/sample6.q.out
+++ b/ql/src/test/results/clientpositive/sample6.q.out
@@ -79,15 +84,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -164,15 +169,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -195,15 +205,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -219,30 +234,40 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -264,15 +289,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -288,30 +318,40 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
diff --git a/ql/src/test/results/clientpositive/sample7.q.out b/ql/src/test/results/clientpositive/sample7.q.out
index e1897ee..2c995a2 100644
--- a/ql/src/test/results/clientpositive/sample7.q.out
+++ b/ql/src/test/results/clientpositive/sample7.q.out
@@ -87,15 +92,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -172,15 +177,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -203,15 +213,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -227,30 +242,40 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -272,15 +297,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -296,30 +326,40 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 
   columns.types int:string
 #### A masked pattern was here ####
   name default.dest1
+  numFiles 0
+  numRows 0
+  rawDataSize 0
   serialization.ddl struct dest1 { i32 key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
diff --git a/ql/src/test/results/clientpositive/sample8.q.out b/ql/src/test/results/clientpositive/sample8.q.out
index 3f50ed2..ef70748 100644
--- a/ql/src/test/results/clientpositive/sample8.q.out
+++ b/ql/src/test/results/clientpositive/sample8.q.out
@@ -158,17 +158,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 'default','default'
   columns.types string:string
 #### A masked pattern was here ####
   name default.srcpart
+  numFiles 0
+  numRows 0
   partition_columns ds/hr
   partition_columns.types string:string
+  rawDataSize 0
   serialization.ddl struct srcpart { string key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -204,17 +209,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 'default','default'
   columns.types string:string
 #### A masked pattern was here ####
   name default.srcpart
+  numFiles 0
+  numRows 0
   partition_columns ds/hr
   partition_columns.types string:string
+  rawDataSize 0
   serialization.ddl struct srcpart { string key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -250,17 +260,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 'default','default'
   columns.types string:string
 #### A masked pattern was here ####
   name default.srcpart
+  numFiles 0
+  numRows 0
   partition_columns ds/hr
   partition_columns.types string:string
+  rawDataSize 0
   serialization.ddl struct srcpart { string key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -296,17 +311,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+  COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
   bucket_count -1
   columns key,value
   columns.comments 'default','default'
   columns.types string:string
 #### A masked pattern was here ####
   name default.srcpart
+  numFiles 0
+  numRows 0
   partition_columns ds/hr
   partition_columns.types string:string
+  rawDataSize 0
   serialization.ddl struct srcpart { string key, string value}
   serialization.format 1
   serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+  totalSize 0
 #### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/sample_islocalmode_hook_use_metadata.q.out b/ql/src/test/results/clientpositive/sample_islocalmode_hook_use_metadata.q.out
index d268837..6893751 100644
--- a/ql/src/test/results/clientpositive/sample_islocalmode_hook_use_metadata.q.out
+++ b/ql/src/test/results/clientpositive/sample_islocalmode_hook_use_metadata.q.out
@@ -95,10 +95,7 @@ Retention: 0
 #### A masked pattern was here ####
 Table Type: MANAGED_TABLE
 Table Parameters:
- COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
- numRows 1500
- rawDataSize 18124
 totalSize 19624
 #### A masked pattern was here ####
@@ -115,12 +112,44 @@ Storage Desc Params:
 PREHOOK: query: explain select count(1) from sih_src
 PREHOOK: type: QUERY
 STAGE DEPENDENCIES:
- Stage-0 is a root stage
+ Stage-1 is a root stage
+ Stage-0 depends on stages: Stage-1
 
 STAGE PLANS:
+ Stage: Stage-1
+ Map Reduce
+ Map Operator Tree:
+ TableScan
+ alias: sih_src
+ Statistics: Num rows: 1 Data size: 19624 Basic stats: COMPLETE Column stats: COMPLETE
+ Select Operator
+ Statistics: Num rows: 1 Data size: 19624 Basic stats: COMPLETE Column stats: COMPLETE
+ Group By Operator
+ aggregations: count(1)
+ mode: hash
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ Reduce Output Operator
+ sort order: 
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ value expressions: _col0 (type: bigint)
+ Reduce Operator Tree:
+ Group By Operator
+ aggregations: count(VALUE._col0)
+ mode: mergepartial
+ outputColumnNames: _col0
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ File Output Operator
+ compressed: false
+ Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+ table:
+ input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+ output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+ serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
 Stage: Stage-0
 Fetch Operator
- limit: 1
+ limit: -1
 Processor Tree:
 ListSink
@@ -141,9 +170,9 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: sih_src
- Statistics: Num rows: 1500 Data size: 18124 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 19624 Basic stats: COMPLETE Column stats: COMPLETE
 Select Operator
- Statistics: Num rows: 1500 Data size: 18124 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 19624 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 aggregations: count(1)
 mode: hash
@@ -191,9 +220,9 @@ STAGE PLANS:
 TableScan
 alias: sih_src
 Row Limit Per Split: 10
- Statistics: Num rows: 1500 Data size: 18124 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 19624 Basic stats: COMPLETE Column stats: COMPLETE
 Select Operator
- Statistics: Num rows: 1500 Data size: 18124 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 19624 Basic stats: COMPLETE Column stats: COMPLETE
 Group By Operator
 aggregations: count(1)
 mode: hash
diff --git a/ql/src/test/results/clientpositive/schema_evol_stats.q.out b/ql/src/test/results/clientpositive/schema_evol_stats.q.out
index 63dab2e..dd1c28f 100644
--- a/ql/src/test/results/clientpositive/schema_evol_stats.q.out
+++ b/ql/src/test/results/clientpositive/schema_evol_stats.q.out
@@ -75,6 +75,12 @@ Retention: 0
 #### A masked pattern was here ####
 Table Type: MANAGED_TABLE
 Table Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+#### A masked pattern was here ####
+ numFiles 0
+ numRows 0
+ rawDataSize 0
+ totalSize 0
 #### A masked pattern was here ####
 
 # Storage Information
@@ -275,6 +281,12 @@ Retention: 0
 #### A masked pattern was here ####
 Table Type: MANAGED_TABLE
 Table Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+#### A masked pattern was here ####
+ numFiles 0
+ numRows 0
+ rawDataSize 0
+ totalSize 0
 #### A masked pattern was here ####
 
 # Storage Information
diff --git a/ql/src/test/results/clientpositive/semijoin.q.out b/ql/src/test/results/clientpositive/semijoin.q.out
index 25f62a2..c780e84 100644
--- a/ql/src/test/results/clientpositive/semijoin.q.out
+++ b/ql/src/test/results/clientpositive/semijoin.q.out
@@ -139,36 +139,36 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -177,7 +177,7 @@ STAGE PLANS:
 0 key (type: int)
 1 _col0 (type: int)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -192,15 +192,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -243,36 +243,36 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -281,7 +281,7 @@ STAGE PLANS:
 0 key (type: int)
 1 _col0 (type: int)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 24 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -296,15 +296,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
- Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 24 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 24 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 24 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -349,15 +349,15 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 TableScan
 alias: b
@@ -387,7 +387,7 @@ STAGE PLANS:
 0 key (type: int)
 1 _col0 (type: int)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -402,15 +402,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -447,36 +447,36 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 TableScan
 alias: b
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: (key < 15) (type: boolean)
- Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col1
- Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col1 (type: int), _col1 (type: int)
 mode: hash
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col1 (type: int)
 sort order: +
 Map-reduce partition columns: _col1 (type: int)
- Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -485,11 +485,11 @@ STAGE PLANS:
 0 key (type: int)
 1 _col1 (type: int)
 outputColumnNames: _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: _col1 (type: string)
 outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -504,15 +504,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: string)
 sort order: +
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: string)
 outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -560,36 +560,36 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: ((value < 'val_10') and key is not null) (type: boolean)
- Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int), value (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int), _col1 (type: string)
 mode: hash
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -598,7 +598,7 @@ STAGE PLANS:
 0 key (type: int)
 1 _col0 (type: int)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -613,15 +613,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -661,35 +661,35 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: t3
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: (key > 5) (type: boolean)
- Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 Reduce Operator Tree:
 Join Operator
@@ -699,11 +699,11 @@ STAGE PLANS:
 0 key (type: int)
 1 _col0 (type: int)
 outputColumnNames: _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: _col1 (type: string)
 outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -718,15 +718,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: string)
 sort order: +
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: string)
 outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -766,35 +766,35 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: t2
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int), value (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int), _col1 (type: string)
 mode: hash
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 Reduce Operator Tree:
 Join Operator
@@ -804,11 +804,11 @@ STAGE PLANS:
 0 key (type: int)
 1 _col0 (type: int)
 outputColumnNames: _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: _col1 (type: string)
 outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -823,15 +823,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: string)
 sort order: +
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: string)
 outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -868,35 +868,35 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: t1
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: (key > 2) (type: boolean)
- Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: NONE
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 Reduce Operator Tree:
 Join Operator
@@ -906,7 +906,7 @@ STAGE PLANS:
 0 key (type: int)
 1 _col0 (type: int)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 30 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -921,15 +921,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
- Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 30 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 30 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 30 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -976,19 +976,19 @@ STAGE PLANS:
 b 
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
 keys:
 0 key (type: int)
@@ -999,10 +999,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
 condition map:
 Left Semi Join 0 to 1
@@ -1010,21 +1010,21 @@ STAGE PLANS:
 0 key (type: int)
 1 _col0 (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 50 Data size: 203 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
- Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 50 Data size: 203 Basic stats: COMPLETE Column stats: NONE
 Local Work:
 Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 50 Data size: 203 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 50 Data size: 203 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1080,36 +1080,36 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: (2 * key) is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: (2 * _col0) (type: int)
 sort order: +
 Map-reduce partition columns: (2 * _col0) (type: int)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -1118,7 +1118,7 @@ STAGE PLANS:
 0 key (type: int)
 1 (2 * _col0) (type: int)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -1133,15 +1133,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1182,48 +1182,48 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 value expressions: value (type: string)
 TableScan
 alias: c
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -1234,11 +1234,11 @@ STAGE PLANS:
 1 key (type: int)
 2 _col0 (type: int)
 outputColumnNames: _col0, _col1, _col5, _col6
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string)
 outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -1253,16 +1253,16 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 value expressions: _col2 (type: int), _col3 (type: string)
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string)
 outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1315,35 +1315,35 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: (key is not null and value is not null) (type: boolean)
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int), value (type: string)
 sort order: ++
 Map-reduce partition columns: key (type: int), value (type: string)
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: (key is not null and value is not null) (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int), value (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int), _col1 (type: string)
 mode: hash
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
 Map-reduce partition columns: _col0 (type: int), _col1 (type: string)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -1352,7 +1352,7 @@ STAGE PLANS:
 0 key (type: int), value (type: string)
 1 _col0 (type: int), _col1 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -1367,15 +1367,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int), _col1 (type: string)
 sort order: ++
- Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1434,19 +1434,19 @@ STAGE PLANS:
 b 
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
 keys:
 0 key (type: int)
@@ -1455,19 +1455,19 @@ STAGE PLANS:
 c 
 TableScan
 alias: c
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
 keys:
 0 key (type: int)
@@ -1479,10 +1479,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
 predicate: key is not null (type: boolean)
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
 condition map:
 Left Semi Join 0 to 1
@@ -1492,21 +1492,21 @@ STAGE PLANS:
 1 _col0 (type: int)
 2 _col0 (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 Local Work:
 Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1559,37 +1559,37 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 TableScan
 alias: b
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 TableScan
 alias: c
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -1600,7 +1600,7 @@ STAGE PLANS:
 1 key (type: int)
 2 _col0 (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -1615,15 +1615,15 @@
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1688,37 +1688,37 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: a
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
 TableScan
 alias: b
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: key (type: int)
 sort order: +
 Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
 TableScan
 alias: c
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
 keys: _col0 (type: int)
 mode: hash
 outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
 Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
 condition map:
@@ -1729,7 +1729,7 @@ STAGE PLANS:
 1 key (type: int)
 2 _col0 (type: int)
 outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
 table:
@@ -1744,15 +1744,15 @@ STAGE PLANS:
 Reduce Output Operator
 key expressions: _col0 (type: int)
 sort order: +
- Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: int)
 outputColumnNames: _col0
- Statistics: Num
rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1820,37 +1820,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -1861,7 +1861,7 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -1876,15 +1876,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE table: 
input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1952,37 +1952,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -1993,7 +1993,7 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -2008,15 +2008,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2084,37 +2084,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column 
stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -2125,7 +2125,7 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -2140,15 +2140,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2218,37 +2218,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 11 Data size: 79 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -2259,7 +2259,7 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -2274,15 +2274,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2364,36 +2364,36 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) TableScan alias: b - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE 
Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -2402,7 +2402,7 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -2418,16 +2418,16 @@ STAGE PLANS: key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) TableScan alias: c - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -2436,7 +2436,7 @@ STAGE PLANS: 0 _col1 (type: string) 1 value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 196 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -2451,15 +2451,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 26 Data size: 196 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 196 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 26 Data size: 196 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 27 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2532,40 +2532,40 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 
Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 100) and value is not null) (type: boolean) - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) TableScan alias: b - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -2574,10 +2574,10 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/semijoin3.q.out b/ql/src/test/results/clientpositive/semijoin3.q.out index aea7e45..a005bb0 100644 --- a/ql/src/test/results/clientpositive/semijoin3.q.out +++ b/ql/src/test/results/clientpositive/semijoin3.q.out @@ -57,30 +57,30 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Select 
Operator - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: 1 (type: int) sort order: + Map-reduce partition columns: 1 (type: int) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 2904 Basic stats: COMPLETE Column stats: NONE TableScan alias: t2 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1453 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key = 0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: 1 (type: int) sort order: + Map-reduce partition columns: 1 (type: int) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 726 Data size: 2904 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -88,7 +88,7 @@ STAGE PLANS: keys: 0 1 (type: int) 1 1 (type: int) - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 798 Data size: 3194 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) mode: hash diff --git a/ql/src/test/results/clientpositive/show_create_table_alter.q.out b/ql/src/test/results/clientpositive/show_create_table_alter.q.out index 32819ea..d09f30b 100644 --- a/ql/src/test/results/clientpositive/show_create_table_alter.q.out +++ b/ql/src/test/results/clientpositive/show_create_table_alter.q.out @@ -35,6 +35,11 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: -- Add a comment to the table, change the EXTERNAL property, and test SHOW CREATE TABLE on the change. ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE') @@ -73,6 +78,8 @@ TBLPROPERTIES ( 'EXTERNAL'='FALSE', #### A masked pattern was here #### 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: -- Alter the table comment, change the EXTERNAL property back and test SHOW CREATE TABLE on the change. @@ -111,6 +118,8 @@ LOCATION TBLPROPERTIES ( #### A masked pattern was here #### 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: -- Change the 'SORTBUCKETCOLSPREFIX' property and test SHOW CREATE TABLE. The output should not change. @@ -149,6 +158,8 @@ LOCATION TBLPROPERTIES ( #### A masked pattern was here #### 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: -- Alter the storage handler of the table, and test SHOW CREATE TABLE. 
@@ -187,6 +198,8 @@ LOCATION TBLPROPERTIES ( #### A masked pattern was here #### 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_showcrt1 diff --git a/ql/src/test/results/clientpositive/show_create_table_db_table.q.out b/ql/src/test/results/clientpositive/show_create_table_db_table.q.out index 495f4b5..daf63e9 100644 --- a/ql/src/test/results/clientpositive/show_create_table_db_table.q.out +++ b/ql/src/test/results/clientpositive/show_create_table_db_table.q.out @@ -46,6 +46,11 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_feng.tmp_showcrt PREHOOK: type: DROPTABLE diff --git a/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out b/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out index 100fde6..2b0a63c 100644 --- a/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out +++ b/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out @@ -35,6 +35,11 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE diff --git a/ql/src/test/results/clientpositive/show_create_table_serde.q.out b/ql/src/test/results/clientpositive/show_create_table_serde.q.out index 2350d98..a7bcb44 100644 --- a/ql/src/test/results/clientpositive/show_create_table_serde.q.out +++ b/ql/src/test/results/clientpositive/show_create_table_serde.q.out @@ -42,6 +42,8 @@ LOCATION TBLPROPERTIES ( #### A masked pattern was here #### 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_showcrt1 @@ -90,6 +92,11 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE @@ -139,6 +146,11 @@ OUTPUTFORMAT LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE @@ -183,6 +195,11 @@ WITH SERDEPROPERTIES ( LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}', + 'numFiles'='0', + 'numRows'='0', + 'rawDataSize'='0', + 'totalSize'='0', #### A masked pattern was here #### PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE diff --git a/ql/src/test/results/clientpositive/show_tblproperties.q.out b/ql/src/test/results/clientpositive/show_tblproperties.q.out index 63bbe6d..e1c6670 100644 --- a/ql/src/test/results/clientpositive/show_tblproperties.q.out +++ b/ql/src/test/results/clientpositive/show_tblproperties.q.out @@ -39,6 +39,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES bar bar value #### A masked pattern was here #### numFiles 0 +numRows 0 +rawDataSize 0 tmp true totalSize 0 #### A 
masked pattern was here #### @@ -54,6 +56,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES bar bar value #### A masked pattern was here #### numFiles 0 +numRows 0 +rawDataSize 0 tmp true totalSize 0 #### A masked pattern was here #### @@ -107,6 +111,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES bar bar value #### A masked pattern was here #### numFiles 0 +numRows 0 +rawDataSize 0 tmp true totalSize 0 #### A masked pattern was here #### @@ -124,6 +130,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES bar bar value1 #### A masked pattern was here #### numFiles 0 +numRows 0 +rawDataSize 0 tmp true1 totalSize 0 #### A masked pattern was here #### @@ -147,6 +155,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES bar bar value1 #### A masked pattern was here #### numFiles 0 +numRows 0 +rawDataSize 0 tmp true1 totalSize 0 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_11.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_11.q.out index b407402..20a738d 100644 --- a/ql/src/test/results/clientpositive/smb_mapjoin_11.q.out +++ b/ql/src/test/results/clientpositive/smb_mapjoin_11.q.out @@ -189,6 +189,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -197,11 +198,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 @@ -224,6 +229,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 16 bucket_field_name key columns key,value @@ -231,11 +237,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table3 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -254,6 +264,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 16 bucket_field_name key columns key,value @@ -261,11 +272,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table3 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -1938,6 +1953,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output 
format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -1946,11 +1962,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 @@ -2064,6 +2084,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 16 bucket_field_name key columns key,value @@ -2071,11 +2092,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table3 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 diff --git a/ql/src/test/results/clientpositive/smb_mapjoin_12.q.out b/ql/src/test/results/clientpositive/smb_mapjoin_12.q.out index e2723a0..c4ffd84 100644 --- a/ql/src/test/results/clientpositive/smb_mapjoin_12.q.out +++ b/ql/src/test/results/clientpositive/smb_mapjoin_12.q.out @@ -171,6 +171,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -179,11 +180,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table3 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -224,6 +229,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -232,11 +238,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 @@ -255,6 +265,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} 
SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -263,11 +274,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table3 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -432,6 +447,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -440,11 +456,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table3 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -485,6 +505,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -493,11 +514,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table3 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -516,6 +541,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 16 bucket_field_name key @@ -524,11 +550,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.test_table3 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 diff --git a/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out b/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out index 29dee15..bb2bf99 100644 --- a/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out +++ b/ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out @@ -179,6 +179,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 1 bucket_field_name key @@ -187,11 +188,15 @@ STAGE 
PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 diff --git a/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out b/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out index b1f99e5..a0a2f5b 100644 --- a/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out +++ b/ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out @@ -153,6 +153,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -160,11 +161,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -260,6 +265,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -268,11 +274,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 diff --git a/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out b/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out index f788fec..873ad55 100644 --- a/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out +++ b/ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out @@ -189,6 +189,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -196,11 +197,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -234,6 +239,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -241,11 +247,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -333,6 +343,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -340,11 +351,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -380,6 +395,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -387,11 +403,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out index c32a0dd..0ae165a 100644 --- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out +++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_1.q.out @@ -206,8 +206,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -218,6 +220,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -226,11 +229,15 @@ STAGE PLANS: columns.types 
string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -251,8 +258,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -263,6 +272,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -271,11 +281,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -438,8 +452,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -450,6 +466,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -458,11 +475,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -483,8 +504,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -495,6 +518,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -503,11 +527,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 
0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -660,8 +688,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -672,6 +702,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -680,11 +711,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -761,8 +796,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -773,6 +810,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -781,11 +819,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -806,8 +848,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -818,6 +862,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -826,11 +871,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib 
 org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out
index 00d5138..3811c73 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_3.q.out
@@ -186,8 +186,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -198,6 +200,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -206,11 +209,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -372,8 +379,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -384,6 +393,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -392,11 +402,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -548,8 +562,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -560,6 +576,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -568,11 +585,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_small
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_small
@@ -593,8 +614,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -605,6 +628,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -613,11 +637,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_small
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_small
@@ -695,8 +723,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -707,6 +737,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -715,11 +746,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out
index b06df04..a183223 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_4.q.out
@@ -202,8 +202,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -214,6 +216,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -222,11 +225,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -388,8 +395,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -400,6 +409,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -408,11 +418,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -564,8 +578,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -576,6 +592,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -584,11 +601,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_small
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_small
@@ -609,8 +630,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -621,6 +644,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -629,11 +653,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_small
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_small
@@ -711,8 +739,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -723,6 +753,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -731,11 +762,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out
index 18ab5c8..01375c9 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_5.q.out
@@ -172,6 +172,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -191,6 +193,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -349,6 +353,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -368,6 +374,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -516,6 +524,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 4
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -535,6 +545,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 4
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -614,6 +626,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -633,6 +647,8 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out
index dd01e69..4363d0b 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_7.q.out
@@ -219,8 +219,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -231,6 +233,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -239,11 +242,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -264,8 +271,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -276,6 +285,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -284,11 +294,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -453,8 +467,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -465,6 +481,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -473,11 +490,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -498,8 +519,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -510,6 +533,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -518,11 +542,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -677,8 +705,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -689,6 +719,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -697,11 +728,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_small
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_small
@@ -722,8 +757,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -734,6 +771,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -742,11 +780,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_small
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_small
@@ -824,8 +866,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -836,6 +880,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -844,11 +889,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -869,8 +918,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -881,6 +932,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -889,11 +941,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out
index f0f27f0..fb34110 100644
--- a/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out
+++ b/ql/src/test/results/clientpositive/spark/auto_sortmerge_join_8.q.out
@@ -219,8 +219,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -231,6 +233,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -239,11 +242,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -264,8 +271,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -276,6 +285,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -284,11 +294,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -453,8 +467,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -465,6 +481,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -473,11 +490,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -498,8 +519,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -510,6 +533,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -518,11 +542,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -679,8 +707,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -691,6 +721,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -699,11 +730,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_small
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_small
@@ -724,8 +759,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_small
             numFiles 2
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -736,6 +773,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -744,11 +782,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_small
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_small { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_small
@@ -826,8 +868,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -838,6 +882,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -846,11 +891,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
@@ -871,8 +920,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.bucket_big
             numFiles 4
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -883,6 +934,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 4
             bucket_field_name key
@@ -891,11 +943,15 @@ STAGE PLANS:
             columns.types string:string
 #### A masked pattern was here ####
             name default.bucket_big
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket_big { string key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/spark/bucket2.q.out b/ql/src/test/results/clientpositive/spark/bucket2.q.out
index f4f87c2..ba50306 100644
--- a/ql/src/test/results/clientpositive/spark/bucket2.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket2.q.out
@@ -132,6 +132,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -139,9 +140,13 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.bucket2_1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket2_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket2_1
@@ -158,6 +163,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -165,9 +171,13 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.bucket2_1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket2_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket2_1
diff --git a/ql/src/test/results/clientpositive/spark/bucket3.q.out b/ql/src/test/results/clientpositive/spark/bucket3.q.out
index 96b2818..eea946b 100644
--- a/ql/src/test/results/clientpositive/spark/bucket3.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket3.q.out
@@ -137,6 +137,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -144,11 +145,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.bucket3_1
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket3_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket3_1
@@ -167,6 +172,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -174,11 +180,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.bucket3_1
+            numFiles 0
+            numRows 0
             partition_columns ds
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct bucket3_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket3_1
diff --git a/ql/src/test/results/clientpositive/spark/bucket4.q.out b/ql/src/test/results/clientpositive/spark/bucket4.q.out
index 4049f22..67210bc 100644
--- a/ql/src/test/results/clientpositive/spark/bucket4.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucket4.q.out
@@ -129,6 +129,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -137,9 +138,13 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.bucket4_1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket4_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket4_1
@@ -156,6 +161,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             SORTBUCKETCOLSPREFIX TRUE
             bucket_count 2
             bucket_field_name key
@@ -164,9 +170,13 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.bucket4_1
+            numFiles 0
+            numRows 0
+            rawDataSize 0
             serialization.ddl struct bucket4_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.bucket4_1
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out
index 8764bc3..b291969 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin10.q.out
@@ -237,8 +237,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
             numFiles 3
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -249,6 +251,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 3
             bucket_field_name key
             columns key,value
@@ -256,11 +259,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_2
@@ -281,8 +288,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -293,6 +302,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 3
             bucket_field_name key
             columns key,value
@@ -300,11 +310,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_2
@@ -372,8 +386,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -384,6 +400,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 3
             bucket_field_name key
             columns key,value
@@ -391,11 +408,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
@@ -416,8 +437,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
             numFiles 3
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -428,6 +451,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 3
             bucket_field_name key
             columns key,value
@@ -435,11 +459,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out
index 58ac20c..08de00d 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin11.q.out
@@ -252,8 +252,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
             numFiles 4
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -264,6 +266,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -271,11 +274,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_2
@@ -296,8 +303,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -308,6 +317,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -315,11 +325,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_2
@@ -393,8 +407,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -405,6 +421,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 4
             bucket_field_name key
             columns key,value
@@ -412,11 +429,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
@@ -437,8 +458,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
             numFiles 4
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -449,6 +472,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 4
             bucket_field_name key
             columns key,value
@@ -456,11 +480,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
@@ -650,8 +678,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
             numFiles 4
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -662,6 +692,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -669,11 +700,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_2
@@ -694,8 +729,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -706,6 +743,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -713,11 +751,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_2
@@ -791,8 +833,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -803,6 +847,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 4
             bucket_field_name key
             columns key,value
@@ -810,11 +855,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
@@ -835,8 +884,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
             numFiles 4
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -847,6 +898,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 4
             bucket_field_name key
             columns key,value
@@ -854,11 +906,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out
index bde9085..850aa27 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin12.q.out
@@ -211,8 +211,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -223,17 +225,22 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count -1
             columns key,value
             columns.comments
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_2
@@ -306,8 +313,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -318,6 +327,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -325,11 +335,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
@@ -502,8 +516,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_3
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -514,6 +530,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -521,11 +538,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_3
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_3
@@ -592,8 +613,10 @@ STAGE PLANS:
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
             numFiles 2
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -604,6 +627,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -611,11 +635,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin13.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin13.q.out
index 7a4d32a..1478a2b 100644
--- a/ql/src/test/results/clientpositive/spark/bucketmapjoin13.q.out
+++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin13.q.out
@@ -180,6 +180,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -187,11 +188,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_2
@@ -273,6 +278,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -280,11 +286,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
@@ -320,6 +330,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -327,11 +338,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_1
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
             serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value}
             serialization.format 1
             serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
 #### A masked pattern was here ####
           serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
           name: default.srcbucket_mapjoin_part_1
@@ -521,6 +536,7 @@ STAGE PLANS:
           input format: org.apache.hadoop.mapred.TextInputFormat
           output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
           properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
             bucket_count 2
             bucket_field_name key
             columns key,value
@@ -528,11 +544,15 @@ STAGE PLANS:
             columns.types int:string
 #### A masked pattern was here ####
             name default.srcbucket_mapjoin_part_2
+            numFiles 0
+            numRows 0
             partition_columns part
             partition_columns.types string
+            rawDataSize 0
serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -918,6 +948,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -925,11 +956,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -1119,6 +1154,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -1126,11 +1162,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -1218,6 +1258,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name value columns key,value @@ -1225,11 +1266,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out index 98c19b4..f33a3d5 100644 --- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out +++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative2.q.out @@ -186,8 +186,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -198,6 +200,7 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -205,11 +208,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -230,8 +237,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -242,6 +251,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -249,11 +259,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -303,15 +317,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -342,6 +361,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -360,6 +381,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -380,15 +403,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 
columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result diff --git a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out index dd59399..fc24ec7 100644 --- a/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out +++ b/ql/src/test/results/clientpositive/spark/bucketmapjoin_negative3.q.out @@ -244,6 +244,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -263,6 +265,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -351,6 +355,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -370,6 +376,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -487,6 +495,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -506,6 +516,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -594,6 +606,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -613,6 +627,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -722,6 +738,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -741,6 +759,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} 
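Two distinct situations produce the zeros in the surrounding hunks: pre-existing source tables such as test1 (numFiles 3) merely gain numRows 0 and rawDataSize 0 as placeholders, with no COLUMN_STATS_ACCURATE entry, while freshly created, empty target tables carry the same zeros together with {"BASIC_STATS":"true"}. A consumer therefore has to read the counter and the accuracy flag as a pair. A small illustration of that reading discipline (hypothetical helper names, not Hive code):

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Hypothetical names throughout; sketches why "numRows 0" alone is not
    // enough information: the accuracy flag decides how to interpret it.
    public class StatsReading {
      static String interpretRowCount(Map<String, String> params,
          boolean basicStatsAccurate) {
        long rows = Long.parseLong(params.getOrDefault("numRows", "-1"));
        if (rows < 0) {
          return "no stats recorded";
        }
        if (!basicStatsAccurate) {
          return "placeholder only; estimate from file sizes instead";
        }
        return rows == 0 ? "genuinely empty" : rows + " rows (trusted)";
      }

      public static void main(String[] args) {
        Map<String, String> p = new LinkedHashMap<>();
        p.put("numRows", "0");
        System.out.println(interpretRowCount(p, false)); // placeholder only
        System.out.println(interpretRowCount(p, true));  // genuinely empty
      }
    }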
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -823,6 +843,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -842,6 +864,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -954,6 +978,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -973,6 +999,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1055,6 +1083,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1074,6 +1104,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1186,6 +1218,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test3 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1205,6 +1239,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test3 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1287,6 +1323,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1306,6 +1344,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1418,6 +1458,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test4 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1437,6 +1479,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test4 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1519,6 +1563,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 
0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1538,6 +1584,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test1 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test1 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1650,6 +1698,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test3 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1669,6 +1719,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test3 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1751,6 +1803,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1770,6 +1824,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1882,6 +1938,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test4 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1901,6 +1959,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test4 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1983,6 +2043,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2002,6 +2064,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test2 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test2 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2114,6 +2178,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test4 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2133,6 +2199,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test4 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test4 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2215,6 +2283,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.test3 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -2234,6 +2304,8 @@ 
STAGE PLANS: #### A masked pattern was here #### name default.test3 numFiles 3 + numRows 0 + rawDataSize 0 serialization.ddl struct test3 { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe diff --git a/ql/src/test/results/clientpositive/spark/ctas.q.out b/ql/src/test/results/clientpositive/spark/ctas.q.out index 086ad73..cecc1c2 100644 --- a/ql/src/test/results/clientpositive/spark/ctas.q.out +++ b/ql/src/test/results/clientpositive/spark/ctas.q.out @@ -150,10 +150,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### @@ -297,10 +294,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### @@ -444,10 +438,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 120 totalSize 199 #### A masked pattern was here #### @@ -508,10 +499,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 120 totalSize 199 #### A masked pattern was here #### @@ -656,10 +644,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out index fc2ff7b..f8a45d2 100644 --- a/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out +++ b/ql/src/test/results/clientpositive/spark/disable_merge_for_bucketing.q.out @@ -128,6 +128,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -135,9 +136,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 @@ -154,6 +159,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -161,9 +167,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 diff --git a/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out b/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out index 97a9a6d..48335cc 100644 --- a/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out +++ b/ql/src/test/results/clientpositive/spark/groupby_ppr.q.out @@ -159,17 +159,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -205,17 +210,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -247,15 +257,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2 columns.comments columns.types string:int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -272,15 +287,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2 columns.comments columns.types string:int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out b/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out index a8724f5..5b9d893 100644 --- a/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out +++ 
b/ql/src/test/results/clientpositive/spark/groupby_ppr_multi_distinct.q.out @@ -176,17 +176,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -222,17 +227,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -264,15 +274,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2,c3,c4 columns.comments columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -289,15 +304,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,c1,c2,c3,c4 columns.comments columns.types string:int:string:int:int #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { string key, i32 c1, string c2, i32 c3, i32 c4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out b/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out index 4e7e72e..fe861c6 100644 --- a/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out +++ b/ql/src/test/results/clientpositive/spark/groupby_sort_1_23.q.out @@ -120,15 +120,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -198,15 +203,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,cnt columns.comments columns.types int:int #### A masked pattern was here #### name default.outputtbl1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl1 @@ -402,15 +412,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2 @@ -427,15 +442,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:string:int #### A masked pattern was here #### name default.outputtbl2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl2 { i32 key1, string key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl2 @@ -1004,15 +1024,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1082,15 +1107,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,cnt columns.comments columns.types 
int:int:int #### A masked pattern was here #### name default.outputtbl3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl3 @@ -1290,15 +1320,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,cnt columns.comments columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4 @@ -1315,15 +1350,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,cnt columns.comments columns.types int:int:string:int #### A masked pattern was here #### name default.outputtbl4 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl4 @@ -3905,15 +3945,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 @@ -3983,15 +4028,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key1,key2,key3,key4,cnt columns.comments columns.types int:int:string:int:int #### A masked pattern was here #### name default.outputtbl5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct outputtbl5 { i32 key1, i32 key2, string key3, i32 key4, i32 cnt} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.outputtbl5 diff --git a/ql/src/test/results/clientpositive/spark/join9.q.out b/ql/src/test/results/clientpositive/spark/join9.q.out index 5f26aaf..d191de0 100644 --- a/ql/src/test/results/clientpositive/spark/join9.q.out +++ 
b/ql/src/test/results/clientpositive/spark/join9.q.out @@ -140,17 +140,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -255,15 +260,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 @@ -280,15 +290,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.dest1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct dest1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dest1 diff --git a/ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out b/ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out index cde7213..9128317 100644 --- a/ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out +++ b/ql/src/test/results/clientpositive/spark/join_filters_overlap.q.out @@ -109,18 +109,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false @@ -133,7 +133,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -141,8 +140,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -153,7 +150,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -161,8 +157,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -177,22 +171,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 50) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 50 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: int) auto parallelism: false @@ -205,7 +199,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -213,8 +206,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -225,7 +216,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -233,8 +223,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -249,22 +237,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 60) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: 
COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 60 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 2 value expressions: _col1 (type: int) auto parallelism: false @@ -277,7 +265,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -285,8 +272,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -297,7 +282,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -305,8 +289,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -335,13 +317,13 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -480,22 +462,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 50) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 50 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false @@ -508,7 +490,6 @@ STAGE PLANS: 
input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -516,8 +497,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -528,7 +507,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -536,8 +514,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -552,18 +528,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: int) auto parallelism: false @@ -576,7 +552,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -584,8 +559,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -596,7 +569,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -604,8 +576,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -620,22 +590,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 60) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 60 (type: int) 
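Throughout join_filters_overlap.q.out, removing numRows 3 and rawDataSize 18 flips every Statistics line for alias a from "Num rows: 3 Data size: 18" to "Num rows: 2 Data size: 21": once the metastore values are no longer marked accurate, the planner falls back to deriving a row count from the on-disk size. The arithmetic behind the new numbers is plausibly just integer division over an assumed average row width (an illustration of the observed effect, not Hive's actual estimation code):

    // Hypothetical illustration of size-based estimation: with no
    // trustworthy numRows/rawDataSize, divide the on-disk size by an
    // assumed average row width.
    public class SizeBasedEstimate {
      static long estimateRows(long totalSize, long avgRowSize) {
        if (avgRowSize <= 0) {
          return 1; // guard against division by zero; assume at least one row
        }
        return Math.max(1, totalSize / avgRowSize);
      }

      public static void main(String[] args) {
        // 21 bytes on disk; an assumed ~10 bytes per (int, int) text row
        System.out.println(estimateRows(21, 10)); // 2
      }
    }

On a 21-byte file this yields exactly the "Num rows: 2 Data size: 21" seen in the updated plans; the 10-byte row width is an assumption for the example, not a value taken from Hive.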
outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 2 value expressions: _col1 (type: int) auto parallelism: false @@ -648,7 +618,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -656,8 +625,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -668,7 +635,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -676,8 +642,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -706,13 +670,13 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -865,22 +829,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 50) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 50 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false @@ -893,7 +857,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -901,8 +864,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -913,7 +874,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -921,8 +881,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -937,18 +895,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: int) auto parallelism: false @@ -961,7 +919,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -969,8 +926,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -981,7 +936,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -989,8 +943,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1005,22 +957,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (value = 60) (type: boolean) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), 60 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 6 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE tag: 2 value expressions: _col1 (type: int) auto parallelism: false @@ -1033,7 +985,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -1041,8 +992,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1053,7 +1002,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -1061,8 +1009,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1091,13 +1037,13 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 6 Data size: 39 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 46 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -1264,18 +1210,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE GatherStats: false Select Operator expressions: key (type: int), value (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE tag: 0 value expressions: _col1 (type: int) auto parallelism: false @@ -1288,7 +1234,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments @@ -1296,8 +1241,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.a numFiles 1 - numRows 3 - rawDataSize 18 serialization.ddl 
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1308,7 +1251,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1316,8 +1258,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1332,18 +1272,18 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: a
-               Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                GatherStats: false
                Select Operator
                  expressions: key (type: int), value (type: int)
                  outputColumnNames: _col0, _col1
-                 Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: int)
                    null sort order: a
                    sort order: +
                    Map-reduce partition columns: _col0 (type: int)
-                   Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                    tag: 1
                    value expressions: _col1 (type: int)
                    auto parallelism: false
@@ -1356,7 +1296,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1364,8 +1303,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1376,7 +1313,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1384,8 +1320,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1400,22 +1334,22 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: a
-               Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                GatherStats: false
                Filter Operator
                  isSamplingPred: false
                  predicate: (value = 60) (type: boolean)
-                 Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int), 60 (type: int)
                    outputColumnNames: _col0, _col1
-                   Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      key expressions: _col0 (type: int)
                      null sort order: a
                      sort order: +
                      Map-reduce partition columns: _col0 (type: int)
-                     Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                      tag: 2
                      value expressions: _col1 (type: int)
                      auto parallelism: false
@@ -1428,7 +1362,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1436,8 +1369,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1448,7 +1379,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1456,8 +1386,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1472,22 +1400,22 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: a
-               Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                GatherStats: false
                Filter Operator
                  isSamplingPred: false
                  predicate: (value = 40) (type: boolean)
-                 Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int), 40 (type: int)
                    outputColumnNames: _col0, _col1
-                   Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      key expressions: _col0 (type: int)
                      null sort order: a
                      sort order: +
                      Map-reduce partition columns: _col0 (type: int)
-                     Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                      tag: 3
                      value expressions: _col1 (type: int)
                      auto parallelism: false
@@ -1500,7 +1428,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1508,8 +1435,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1520,7 +1445,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1528,8 +1452,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1562,13 +1484,13 @@ STAGE PLANS:
                  2 _col0 (type: int)
                  3 _col0 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-               Statistics: Num rows: 9 Data size: 59 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 6 Data size: 69 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  GlobalTableId: 0
#### A masked pattern was here ####
                  NumFilesPerFileSink: 1
-                 Statistics: Num rows: 9 Data size: 59 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 6 Data size: 69 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -1726,18 +1648,18 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: a
-               Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                GatherStats: false
                Select Operator
                  expressions: key (type: int), value (type: int)
                  outputColumnNames: _col0, _col1
-                 Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: int)
                    null sort order: a
                    sort order: +
                    Map-reduce partition columns: _col0 (type: int)
-                   Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                    tag: 0
                    value expressions: _col1 (type: int)
                    auto parallelism: false
@@ -1750,7 +1672,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1758,8 +1679,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1770,7 +1689,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1778,8 +1696,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1794,22 +1710,22 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: a
-               Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                GatherStats: false
                Filter Operator
                  isSamplingPred: false
                  predicate: (value = 50) (type: boolean)
-                 Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int), 50 (type: int)
                    outputColumnNames: _col0, _col1
-                   Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      key expressions: _col0 (type: int)
                      null sort order: a
                      sort order: +
                      Map-reduce partition columns: _col0 (type: int)
-                     Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                      tag: 1
                      value expressions: _col1 (type: int)
                      auto parallelism: false
@@ -1822,7 +1738,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1830,8 +1745,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1842,7 +1755,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1850,8 +1762,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1866,22 +1776,22 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: a
-               Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                GatherStats: false
                Filter Operator
                  isSamplingPred: false
                  predicate: (value = 60) (type: boolean)
-                 Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int), 60 (type: int)
                    outputColumnNames: _col0, _col1
-                   Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      key expressions: _col0 (type: int)
                      null sort order: a
                      sort order: +
                      Map-reduce partition columns: _col0 (type: int)
-                     Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                      tag: 2
                      value expressions: _col1 (type: int)
                      auto parallelism: false
@@ -1894,7 +1804,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1902,8 +1811,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1914,7 +1821,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1922,8 +1828,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1938,22 +1842,22 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: a
-               Statistics: Num rows: 3 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 21 Basic stats: COMPLETE Column stats: NONE
                GatherStats: false
                Filter Operator
                  isSamplingPred: false
                  predicate: (value = 40) (type: boolean)
-                 Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int), 40 (type: int)
                    outputColumnNames: _col0, _col1
-                   Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      key expressions: _col0 (type: int)
                      null sort order: a
                      sort order: +
                      Map-reduce partition columns: _col0 (type: int)
-                     Statistics: Num rows: 1 Data size: 6 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
                      tag: 3
                      value expressions: _col1 (type: int)
                      auto parallelism: false
@@ -1966,7 +1870,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1974,8 +1877,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1986,7 +1887,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 
@@ -1994,8 +1894,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.a
                numFiles 1
-               numRows 3
-               rawDataSize 18
                serialization.ddl struct a { i32 key, i32 value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -2027,13 +1925,13 @@ STAGE PLANS:
                  2 _col0 (type: int)
                  3 _col0 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-               Statistics: Num rows: 9 Data size: 59 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 6 Data size: 69 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  GlobalTableId: 0
#### A masked pattern was here ####
                  NumFilesPerFileSink: 1
-                 Statistics: Num rows: 9 Data size: 59 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 6 Data size: 69 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
diff --git a/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out b/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
index 8e28715..6561c10 100644
--- a/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/join_map_ppr.q.out
@@ -292,15 +292,20 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value,val2
                columns.comments 
                columns.types string:string:string
#### A masked pattern was here ####
                name default.dest_j1
+               numFiles 0
+               numRows 0
+               rawDataSize 0
                serialization.ddl struct dest_j1 { string key, string value, string val2}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest_j1
@@ -343,17 +348,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -370,15 +380,20 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value,val2
                columns.comments 
                columns.types string:string:string
#### A masked pattern was here ####
                name default.dest_j1
+               numFiles 0
+               numRows 0
+               rawDataSize 0
                serialization.ddl struct dest_j1 { string key, string value, string val2}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dest_j1
@@ -901,17 +916,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out b/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
index 8d6fe33..af99b6c 100644
--- a/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
+++ b/ql/src/test/results/clientpositive/spark/load_dyn_part8.q.out
@@ -236,17 +236,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -282,17 +287,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -328,17 +338,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -374,17 +389,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out b/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out
index f9225d7..8b33e85 100644
--- a/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out
+++ b/ql/src/test/results/clientpositive/spark/louter_join_ppr.q.out
@@ -244,17 +244,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -290,17 +295,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
{"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -560,17 +570,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -606,17 +621,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -652,17 +672,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -698,17 +723,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: 
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -1127,17 +1157,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -1173,17 +1208,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -1439,17 +1479,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -1485,17 +1530,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out b/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out
index fcc1dc2..fa2b3ce 100644
--- a/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/spark/mapjoin_mapjoin.q.out
@@ -304,17 +304,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -350,17 +355,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -396,17 +406,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -442,17 +457,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out b/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
index c3a3511..ecf99f4 100644
--- a/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
+++ b/ql/src/test/results/clientpositive/spark/multi_insert_lateral_view.q.out
@@ -74,23 +74,23 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: src_10
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Lateral View Forward
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: string)
                    outputColumnNames: key
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Lateral View Join Operator
                      outputColumnNames: _col0, _col5
-                     Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string), _col5 (type: double)
                        outputColumnNames: _col0, _col1
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        File Output Operator
                          compressed: false
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          table:
                              input format: org.apache.hadoop.mapred.TextInputFormat
                              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -99,41 +99,41 @@ STAGE PLANS:
                  Select Operator
                    expressions: array((key + 1),(key + 2)) (type: array<double>)
                    outputColumnNames: _col0
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    UDTF Operator
-                     Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      function name: explode
                      Lateral View Join Operator
                        outputColumnNames: _col0, _col5
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Select Operator
                          expressions: _col0 (type: string), _col5 (type: double)
                          outputColumnNames: _col0, _col1
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          File Output Operator
                            compressed: false
-                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                           Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                            table:
                                input format: org.apache.hadoop.mapred.TextInputFormat
                                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
                                serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
                                name: default.src_lv1
                Lateral View Forward
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: string)
                    outputColumnNames: key
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Lateral View Join Operator
                      outputColumnNames: _col0, _col5
-                     Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string), _col5 (type: double)
                        outputColumnNames: _col0, _col1
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                       File Output Operator
                         compressed: false
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                         table:
                             input format: org.apache.hadoop.mapred.TextInputFormat
                             output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -142,20 +142,20 @@ STAGE PLANS:
                  Select Operator
                    expressions: array((key + 3),(key + 4)) (type: array<double>)
                    outputColumnNames: _col0
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    UDTF Operator
-                     Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      function name: explode
                      Lateral View Join Operator
                        outputColumnNames: _col0, _col5
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Select Operator
                          expressions: _col0 (type: string), _col5 (type: double)
                          outputColumnNames: _col0, _col1
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          File Output Operator
                            compressed: false
-                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                           Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                            table:
                                input format: org.apache.hadoop.mapred.TextInputFormat
                                output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -301,97 +301,97 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: src_10
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Lateral View Forward
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: string)
                    outputColumnNames: key
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Lateral View Join Operator
                      outputColumnNames: _col0, _col5
-                     Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        aggregations: sum(_col5)
                        keys: _col0 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: string)
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          value expressions: _col1 (type: double)
                  Select Operator
                    expressions: array((key + 1),(key + 2)) (type: array<double>)
                    outputColumnNames: _col0
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    UDTF Operator
-                     Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      function name: explode
                      Lateral View Join Operator
                        outputColumnNames: _col0, _col5
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          aggregations: sum(_col5)
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0, _col1
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          Reduce Output Operator
                            key expressions: _col0 (type: string)
                            sort order: +
                            Map-reduce partition columns: _col0 (type: string)
-                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                           Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                            value expressions: _col1 (type: double)
        Map 5 
          Map Operator Tree:
              TableScan
                alias: src_10
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Lateral View Forward
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: string)
                    outputColumnNames: key
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Lateral View Join Operator
                      outputColumnNames: _col0, _col5
-                     Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        aggregations: sum(_col5)
                        keys: _col0 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: string)
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          value expressions: _col1 (type: double)
                  Select Operator
                    expressions: array((key + 3),(key + 4)) (type: array<double>)
                    outputColumnNames: _col0
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    UDTF Operator
-                     Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      function name: explode
                      Lateral View Join Operator
                        outputColumnNames: _col0, _col5
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          aggregations: sum(_col5)
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0, _col1
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          Reduce Output Operator
                            key expressions: _col0 (type: string)
                            sort order: +
                            Map-reduce partition columns: _col0 (type: string)
-                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                           Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                            value expressions: _col1 (type: double)
        Reducer 2 
            Reduce Operator Tree:
@@ -400,10 +400,10 @@ STAGE PLANS:
                keys: KEY._col0 (type: string)
                mode: mergepartial
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -416,10 +416,10 @@ STAGE PLANS:
                keys: KEY._col0 (type: string)
                mode: mergepartial
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -549,63 +549,63 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: src_10
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Lateral View Forward
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: string)
                    outputColumnNames: key
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Lateral View Join Operator
                      outputColumnNames: _col0, _col5
-                     Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        aggregations: sum(_col5)
                        keys: _col0 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: string)
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          value expressions: _col1 (type: double)
                  Select Operator
                    expressions: array((key + 1),(key + 2)) (type: array<double>)
                    outputColumnNames: _col0
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    UDTF Operator
-                     Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      function name: explode
                      Lateral View Join Operator
                        outputColumnNames: _col0, _col5
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                       Group By Operator
                         aggregations: sum(_col5)
                         keys: _col0 (type: string)
                         mode: hash
                         outputColumnNames: _col0, _col1
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                         Reduce Output Operator
                           key expressions: _col0 (type: string)
                           sort order: +
                           Map-reduce partition columns: _col0 (type: string)
-                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                           Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col1 (type: double)
        Map 5 
          Map Operator Tree:
              TableScan
                alias: src_10
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Filter Operator
                  predicate: ((key < 200) or (key > 200)) (type: boolean)
-                 Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: string)
                    sort order: +
                    Map-reduce partition columns: key (type: string)
-                   Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    value expressions: value (type: string)
        Reducer 2 
            Reduce Operator Tree:
@@ -614,10 +614,10 @@ STAGE PLANS:
                keys: KEY._col0 (type: string)
                mode: mergepartial
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.TextInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -626,19 +626,19 @@ STAGE PLANS:
        Reducer 3 
            Reduce Operator Tree:
              Forward
-               Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Filter Operator
                  predicate: (KEY._col0 > 200) (type: boolean)
-                 Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Group By Operator
                    aggregations: count(VALUE._col0)
                    keys: KEY._col0 (type: string)
                    mode: complete
                    outputColumnNames: _col0, _col1
-                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    File Output Operator
                      compressed: false
-                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      table:
                          input format: org.apache.hadoop.mapred.TextInputFormat
                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -646,16 +646,16 @@ STAGE PLANS:
                          name: default.src_lv2
                Filter Operator
                  predicate: (KEY._col0 < 200) (type: boolean)
-                 Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Group By Operator
                    aggregations: count(VALUE._col0)
                    keys: KEY._col0 (type: string)
                    mode: complete
                    outputColumnNames: _col0, _col1
-                   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    File Output Operator
                      compressed: false
-                     Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      table:
                          input format: org.apache.hadoop.mapred.TextInputFormat
                          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -817,130 +817,130 @@ STAGE PLANS:
          Map Operator Tree:
              TableScan
                alias: src_10
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Lateral View Forward
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: string)
                    outputColumnNames: key
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Lateral View Join Operator
                      outputColumnNames: _col0, _col5
-                     Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col5 (type: double), _col0 (type: string)
                        outputColumnNames: _col5, _col0
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          aggregations: sum(DISTINCT _col0)
                          keys: _col5 (type: double), _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0, _col1, _col2
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          Reduce Output Operator
                            key expressions: _col0 (type: double), _col1 (type: string)
                            sort order: ++
                            Map-reduce partition columns: _col0 (type: double)
-                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                           Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: array((key + 1),(key + 2)) (type: array<double>)
                    outputColumnNames: _col0
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    UDTF Operator
-                     Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      function name: explode
                      Lateral View Join Operator
                        outputColumnNames: _col0, _col5
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Select Operator
                          expressions: _col5 (type: double), _col0 (type: string)
                          outputColumnNames: _col5, _col0
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          Group By Operator
                            aggregations: sum(DISTINCT _col0)
                            keys: _col5 (type: double), _col0 (type: string)
                            mode: hash
                            outputColumnNames: _col0, _col1, _col2
-                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                           Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                           Reduce Output Operator
                             key expressions: _col0 (type: double), _col1 (type: string)
                             sort order: ++
                             Map-reduce partition columns: _col0 (type: double)
-                             Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                             Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
        Map 6 
          Map Operator Tree:
              TableScan
                alias: src_10
-               Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Lateral View Forward
-                 Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                 Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: string)
                    outputColumnNames: key
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Lateral View Join Operator
                      outputColumnNames: _col0, _col5
-                     Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col5 (type: double), _col0 (type: string)
                        outputColumnNames: _col5, _col0
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          aggregations: sum(DISTINCT _col0)
                          keys: _col5 (type: double), _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0, _col1, _col2
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                          Reduce Output Operator
                            key expressions: _col0 (type: double), _col1 (type: string)
                            sort order: ++
                            Map-reduce partition columns: _col0 (type: double)
-                           Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                           Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: array((key + 3),(key + 4)) (type: array<double>)
                    outputColumnNames: _col0
-                   Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                   Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    UDTF Operator
-                     Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                     Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      function name: explode
                      Lateral View Join Operator
                        outputColumnNames: _col0, _col5
-                       Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                       Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Select Operator
                          expressions: _col5 (type: double), _col0 (type: string)
                          outputColumnNames: _col5, _col0
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          aggregations: sum(DISTINCT _col0)
                          keys: _col5 (type: double), _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0, _col1, _col2
-                         Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: double), _col1 (type: string)
                          sort order: ++
                          Map-reduce partition columns: _col0 (type: double)
Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Map 7 Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string), key (type: string) outputColumnNames: value, key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT key) keys: value (type: string), key (type: string) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Group By Operator @@ -948,10 +948,10 @@ STAGE PLANS: keys: KEY._col0 (type: double) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -964,10 +964,10 @@ STAGE PLANS: keys: KEY._col0 (type: double) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -980,10 +980,10 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1175,107 +1175,107 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 
1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col5) keys: _col0 (type: string), _col5 (type: double) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: array((key + 1),(key + 2)) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col5) keys: _col0 (type: string), _col5 (type: double) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Map 6 Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Forward - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col5) keys: _col0 (type: string), _col5 (type: double) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: 
NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: array((key + 3),(key + 4)) (type: array) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE UDTF Operator - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE function name: explode Lateral View Join Operator outputColumnNames: _col0, _col5 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT _col5) keys: _col0 (type: string), _col5 (type: double) mode: hash outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 208 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 228 Basic stats: COMPLETE Column stats: NONE Map 7 Map Operator Tree: TableScan alias: src_10 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key < 200) or (key > 200)) (type: boolean) - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string), key (type: string) sort order: ++ Map-reduce partition columns: value (type: string) - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Group By Operator @@ -1283,10 +1283,10 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1299,10 +1299,10 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: 
NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1311,19 +1311,19 @@ STAGE PLANS: Reducer 4 Reduce Operator Tree: Forward - Statistics: Num rows: 6 Data size: 62 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (KEY._col1:0._col0 > 200) (type: boolean) - Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT KEY._col1:0._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1331,16 +1331,16 @@ STAGE PLANS: name: default.src_lv3 Filter Operator predicate: (KEY._col1:0._col0 < 200) (type: boolean) - Statistics: Num rows: 2 Data size: 20 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(DISTINCT KEY._col1:0._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/multi_join_union.q.out b/ql/src/test/results/clientpositive/spark/multi_join_union.q.out index 52b3c74..a0195f4 100644 --- a/ql/src/test/results/clientpositive/spark/multi_join_union.q.out +++ b/ql/src/test/results/clientpositive/spark/multi_join_union.q.out @@ -74,14 +74,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator keys: 0 _col0 (type: string) @@ -99,14 +99,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -116,12 +116,12 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 3 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: + Map-reduce partition columns: _col3 (type: string) - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string) Local Work: Map Reduce Local Work @@ -129,37 +129,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src13 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 5 Map Operator Tree: TableScan alias: src14 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + Map-reduce partition columns: _col1 (type: string) - Statistics: Num rows: 1000 Data size: 10624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 11624 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 2 Reduce Operator Tree: @@ -170,10 +170,10 @@ STAGE PLANS: 0 _col3 
(type: string) 1 _col1 (type: string) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1100 Data size: 11686 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 63 Data size: 12786 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out index 2d15f65..64bce21 100644 --- a/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out +++ b/ql/src/test/results/clientpositive/spark/optimize_nullscan.q.out @@ -379,17 +379,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -424,17 +429,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -469,17 +479,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -514,17 +529,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types 
string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -802,17 +822,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -848,17 +873,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -894,17 +924,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -940,17 +975,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1239,17 +1279,22 @@ STAGE PLANS: 
input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1284,17 +1329,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1329,17 +1379,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1374,17 +1429,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.7.out b/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.7.out index 9b1a503..b5c49c6 100644 --- a/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.7.out +++ b/ql/src/test/results/clientpositive/spark/outer_join_ppr.q.java1.7.out @@ -238,17 +238,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -284,17 +289,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -330,17 +340,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -376,17 +391,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -729,17 +749,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -775,17 +800,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/spark/pcr.q.out b/ql/src/test/results/clientpositive/spark/pcr.q.out index 6345eff..deff564 100644 --- a/ql/src/test/results/clientpositive/spark/pcr.q.out +++ b/ql/src/test/results/clientpositive/spark/pcr.q.out @@ -160,17 +160,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -205,17 +210,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -386,17 +396,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -431,17 +446,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value 
columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -476,17 +496,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -707,17 +732,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -752,17 +782,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -952,17 +987,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -997,17 +1037,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1199,17 +1244,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1244,17 +1294,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1289,17 +1344,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1502,17 +1562,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1547,17 +1612,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat 
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1592,17 +1662,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1804,17 +1879,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1849,17 +1929,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2019,17 +2104,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ 
-2064,17 +2154,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2274,17 +2369,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2319,17 +2419,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2364,17 +2469,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2618,17 +2728,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2663,17 +2778,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2854,17 +2974,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2927,17 +3052,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3171,17 +3301,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3244,17 +3379,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3506,17 +3646,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3551,17 +3696,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3596,17 +3746,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3641,17 +3796,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3880,17 +4040,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 
serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3925,17 +4090,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3970,17 +4140,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -4182,15 +4357,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 @@ -4212,15 +4392,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 @@ -4260,17 +4445,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl 
struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -4287,15 +4477,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t2 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct pcr_t2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t2 @@ -4313,15 +4508,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t3 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct pcr_t3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t3 @@ -4542,17 +4742,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types int:string #### A masked pattern was here #### name default.pcr_t1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -4748,17 +4953,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -4942,17 +5152,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr 
partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -4988,17 +5203,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5176,17 +5396,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5222,17 +5447,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out b/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out index 4efa206..ad44968 100644 --- a/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out +++ b/ql/src/test/results/clientpositive/spark/router_join_ppr.q.out @@ -244,17 +244,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -290,17 +295,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -336,17 +346,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -382,17 +397,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -663,17 +683,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -709,17 +734,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types 
string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1127,17 +1157,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1173,17 +1208,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1443,17 +1483,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1489,17 +1534,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git 
a/ql/src/test/results/clientpositive/spark/semijoin.q.out b/ql/src/test/results/clientpositive/spark/semijoin.q.out index 085257e..fdd4e46 100644 --- a/ql/src/test/results/clientpositive/spark/semijoin.q.out +++ b/ql/src/test/results/clientpositive/spark/semijoin.q.out @@ -144,38 +144,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Join Operator @@ -185,20 +185,20 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -246,38 +246,38 @@ STAGE PLANS: Map Operator Tree: TableScan 
alias: a - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Join Operator @@ -287,20 +287,20 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 24 Data size: 99 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 24 Data size: 99 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 24 Data size: 99 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 24 Data size: 99 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -350,15 +350,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data 
size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Map 4 Map Operator Tree: @@ -391,20 +391,20 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -446,38 +446,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key < 15) (type: boolean) - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col1 - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col1 (type: int), _col1 (type: int) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE 
Column stats: NONE Reducer 2 Reduce Operator Tree: Join Operator @@ -487,24 +487,24 @@ STAGE PLANS: 0 key (type: int) 1 _col1 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -557,38 +557,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((value < 'val_10') and key is not null) (type: boolean) - Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 22 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Join Operator @@ -598,20 +598,20 @@ STAGE 
PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -656,37 +656,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t3 - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 5) (type: boolean) - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 60 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reducer 2 Reduce Operator Tree: @@ -697,24 +697,24 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 86 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 66 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -759,37 +759,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reducer 2 Reduce Operator Tree: @@ -800,24 +800,24 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE Reduce 
Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 99 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -859,37 +859,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 2) (type: boolean) - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 28 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Reducer 2 Reduce Operator Tree: @@ -900,20 +900,20 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 30 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 30 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 
12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 30 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 30 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -958,19 +958,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator keys: 0 key (type: int) @@ -988,10 +988,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -1001,11 +1001,11 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 3 - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 50 Data size: 203 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 50 Data size: 203 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reducer 2 @@ -1013,10 +1013,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 50 Data size: 203 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 50 Data size: 203 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1077,38 +1077,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (2 * key) is not null (type: boolean) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: (2 * _col0) (type: int) sort order: + Map-reduce partition columns: (2 * _col0) (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Join Operator @@ -1118,20 +1118,20 @@ STAGE PLANS: 0 key (type: int) 1 (2 * _col0) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 86 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1177,52 +1177,52 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data 
size: 90 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: string) Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Join Operator @@ -1234,25 +1234,25 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col5, _col6 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), _col1 (type: string), _col5 (type: int), _col6 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: int), _col3 (type: string) Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1310,37 +1310,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int), value (type: string) sort order: ++ Map-reduce partition columns: key (type: int), value (type: string) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: int), _col1 (type: string) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Join Operator @@ -1350,20 +1350,20 @@ STAGE PLANS: 0 key (type: int), value (type: string) 1 _col0 (type: int), _col1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE table: input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1417,19 +1417,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator keys: 0 key (type: int) @@ -1441,19 +1441,19 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE Spark HashTable Sink Operator keys: 0 key (type: int) @@ -1472,10 +1472,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -1488,11 +1488,11 @@ STAGE PLANS: input vertices: 1 Map 3 2 Map 4 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reducer 2 @@ -1500,10 +1500,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: 
NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1561,41 +1561,41 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
        Map 4
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Join Operator
@@ -1607,20 +1607,20 @@ STAGE PLANS:
                  1 key (type: int)
                  2 _col0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1690,41 +1690,41 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
        Map 4
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Join Operator
@@ -1736,20 +1736,20 @@ STAGE PLANS:
                  1 key (type: int)
                  2 _col0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1822,41 +1822,41 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
        Map 4
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Join Operator
@@ -1868,20 +1868,20 @@ STAGE PLANS:
                  1 key (type: int)
                  2 _col0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1954,41 +1954,41 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
        Map 4
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Join Operator
@@ -2000,20 +2000,20 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2086,41 +2086,41 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
        Map 4
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Join Operator
@@ -2132,20 +2132,20 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2220,41 +2220,41 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 46 Data size: 185 Basic stats: COMPLETE Column stats: NONE
        Map 4
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 22 Data size: 90 Basic stats: COMPLETE Column stats: NONE
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Join Operator
@@ -2266,20 +2266,20 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 358 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 101 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2366,48 +2366,48 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                    Reduce Output Operator
                      key expressions: key (type: int)
                      sort order: +
                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                      value expressions: value (type: string)
        Map 5
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 23 Data size: 95 Basic stats: COMPLETE Column stats: NONE
        Map 6
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: value (type: string)
                    sort order: +
                    Map-reduce partition columns: value (type: string)
-                    Statistics: Num rows: 11 Data size: 79 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 90 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Join Operator
@@ -2417,12 +2417,12 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col1 (type: string)
                  sort order: +
                  Map-reduce partition columns: _col1 (type: string)
-                  Statistics: Num rows: 24 Data size: 179 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 104 Basic stats: COMPLETE Column stats: NONE
                  value expressions: _col0 (type: int)
        Reducer 3
            Reduce Operator Tree:
@@ -2433,20 +2433,20 @@ STAGE PLANS:
                  0 _col1 (type: string)
                  1 value (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 26 Data size: 196 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 27 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 26 Data size: 196 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 27 Data size: 114 Basic stats: COMPLETE Column stats: NONE
        Reducer 4
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 26 Data size: 196 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 27 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 26 Data size: 196 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 27 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2524,42 +2524,42 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 22 Data size: 163 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: ((key > 100) and value is not null) (type: boolean)
-                    Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col1 (type: string)
                        sort order: +
                        Map-reduce partition columns: _col1 (type: string)
-                        Statistics: Num rows: 7 Data size: 51 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col0 (type: int)
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: value is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: value (type: string)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: string)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 11 Data size: 84 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 95 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Join Operator
@@ -2569,10 +2569,10 @@ STAGE PLANS:
                  0 _col1 (type: string)
                  1 _col0 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 92 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 203 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/spark/temp_table_join1.q.out b/ql/src/test/results/clientpositive/spark/temp_table_join1.q.out
index b5742ad..b270e7b 100644
--- a/ql/src/test/results/clientpositive/spark/temp_table_join1.q.out
+++ b/ql/src/test/results/clientpositive/spark/temp_table_join1.q.out
@@ -49,36 +49,36 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src1
-                  Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
        Map 3
            Map Operator Tree:
                TableScan
                  alias: src1
-                  Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), value (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col1 (type: string)
        Reducer 2
            Reduce Operator Tree:
@@ -89,14 +89,14 @@ STAGE PLANS:
                  0 _col0 (type: string)
                  1 _col0 (type: string)
                outputColumnNames: _col0, _col2
-                Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: string), _col2 (type: string)
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
-                    Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -153,36 +153,36 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src1
-                  Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
        Map 3
            Map Operator Tree:
                TableScan
                  alias: src2
-                  Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), value (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col1 (type: string)
        Reducer 2
            Reduce Operator Tree:
@@ -193,14 +193,14 @@ STAGE PLANS:
                  0 _col0 (type: string)
                  1 _col0 (type: string)
                outputColumnNames: _col0, _col2
-                Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: string), _col2 (type: string)
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
-                    Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -259,36 +259,36 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src1
-                  Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
        Map 3
            Map Operator Tree:
                TableScan
                  alias: src1
-                  Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), value (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col1 (type: string)
        Reducer 2
            Reduce Operator Tree:
@@ -299,14 +299,14 @@ STAGE PLANS:
                  0 _col0 (type: string)
                  1 _col0 (type: string)
                outputColumnNames: _col0, _col2
-                Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: string), _col2 (type: string)
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
-                    Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out b/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
index 8b8bff6..dd3cd81 100644
--- a/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
+++ b/ql/src/test/results/clientpositive/spark/transform_ppr1.q.out
@@ -176,17 +176,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+                numFiles 0
+                numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+                rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -222,17 +227,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+                numFiles 0
+                numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+                rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -268,17 +278,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+                numFiles 0
+                numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+                rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -314,17 +329,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+                numFiles 0
+                numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+                rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out b/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
index 33f0d3e..635c949 100644
--- a/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
+++ b/ql/src/test/results/clientpositive/spark/transform_ppr2.q.out
@@ -178,17 +178,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+                numFiles 0
+                numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+                rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -224,17 +229,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
#### A masked pattern was here ####
                name default.srcpart
+                numFiles 0
+                numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+                rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/spark/union22.q.out b/ql/src/test/results/clientpositive/spark/union22.q.out
index cc3c557..a66033a 100644
--- a/ql/src/test/results/clientpositive/spark/union22.q.out
+++ b/ql/src/test/results/clientpositive/spark/union22.q.out
@@ -288,17 +288,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns k0,k1,k2,k3,k4,k5
                columns.comments
                columns.types string:string:string:string:string:string
#### A masked pattern was here ####
                name default.dst_union22_delta
+                numFiles 0
+                numRows 0
                partition_columns ds
                partition_columns.types string
+                rawDataSize 0
                serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dst_union22_delta
@@ -336,17 +341,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns k1,k2,k3,k4
                columns.comments
                columns.types string:string:string:string
#### A masked pattern was here ####
                name default.dst_union22
+                numFiles 0
+                numRows 0
                partition_columns ds
                partition_columns.types string
+                rawDataSize 0
                serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dst_union22
@@ -386,17 +396,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns k0,k1,k2,k3,k4,k5
                columns.comments
                columns.types string:string:string:string:string:string
#### A masked pattern was here ####
                name default.dst_union22_delta
+                numFiles 0
+                numRows 0
                partition_columns ds
                partition_columns.types string
+                rawDataSize 0
                serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dst_union22_delta
@@ -449,17 +464,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns k1,k2,k3,k4
                columns.comments
                columns.types string:string:string:string
#### A masked pattern was here ####
                name default.dst_union22
+                numFiles 0
+                numRows 0
                partition_columns ds
                partition_columns.types string
+                rawDataSize 0
                serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dst_union22
@@ -501,17 +521,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns k1,k2,k3,k4
                columns.comments
                columns.types string:string:string:string
#### A masked pattern was here ####
                name default.dst_union22
+                numFiles 0
+                numRows 0
                partition_columns ds
                partition_columns.types string
+                rawDataSize 0
                serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dst_union22
@@ -530,17 +555,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns k1,k2,k3,k4
                columns.comments
                columns.types string:string:string:string
#### A masked pattern was here ####
                name default.dst_union22
+                numFiles 0
+                numRows 0
                partition_columns ds
                partition_columns.types string
+                rawDataSize 0
                serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                totalSize 0
#### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.dst_union22
diff --git a/ql/src/test/results/clientpositive/spark/union24.q.out b/ql/src/test/results/clientpositive/spark/union24.q.out
index 3bdc503..91d440f 100644
--- a/ql/src/test/results/clientpositive/spark/union24.q.out
+++ b/ql/src/test/results/clientpositive/spark/union24.q.out
@@ -202,22 +202,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src2
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), count (type: bigint)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                      File Output Operator
                        compressed: false
                        GlobalTableId: 0
#### A masked pattern was here ####
                        NumFilesPerFileSink: 1
-                        Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -243,7 +243,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -251,8 +250,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src2
                numFiles 2
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src2 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -263,7 +260,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -271,8 +267,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src2
                numFiles 2
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src2 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -287,22 +281,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src3
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), count (type: bigint)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                      File Output Operator
                        compressed: false
                        GlobalTableId: 0
#### A masked pattern was here ####
                        NumFilesPerFileSink: 1
-                        Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -328,7 +322,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -336,8 +329,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src3
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src3 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -348,7 +339,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -356,8 +346,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src3
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src3 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -372,22 +360,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src4
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), count (type: bigint)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                      File Output Operator
                        compressed: false
                        GlobalTableId: 0
#### A masked pattern was here ####
                        NumFilesPerFileSink: 1
-                        Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -413,7 +401,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -421,8 +408,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src4
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src4 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -433,7 +418,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -441,8 +425,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src4
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src4 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -457,28 +439,28 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src5
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        aggregations: count(1)
                        keys: _col0 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          null sort order: a
                          sort order: +
                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE
                          tag: -1
                          value expressions: _col1 (type: bigint)
                          auto parallelism: false
@@ -491,7 +473,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -499,8 +480,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src5
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src5 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -511,7 +490,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -519,8 +497,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src5
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src5 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -539,13 +515,13 @@ STAGE PLANS:
                keys: KEY._col0 (type: string)
                mode: mergepartial
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 51 Data size: 244 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 210 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
                  GlobalTableId: 0
#### A masked pattern was here ####
                  NumFilesPerFileSink: 1
-                  Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -767,22 +743,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src2
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), count (type: bigint)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                      File Output Operator
                        compressed: false
                        GlobalTableId: 0
#### A masked pattern was here ####
                        NumFilesPerFileSink: 1
-                        Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -808,7 +784,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -816,8 +791,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src2
                numFiles 2
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src2 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -828,7 +801,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -836,8 +808,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src2
                numFiles 2
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src2 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -852,22 +822,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src3
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), count (type: bigint)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                      File Output Operator
                        compressed: false
                        GlobalTableId: 0
#### A masked pattern was here ####
                        NumFilesPerFileSink: 1
-                        Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -893,7 +863,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -901,8 +870,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src3
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src3 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -913,7 +880,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -921,8 +887,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src3
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src3 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -937,22 +901,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        null sort order: a
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE
                        tag: 0
                        auto parallelism: false
      Path -> Alias:
@@ -964,7 +928,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -972,8 +935,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src4
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src4 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -984,7 +945,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -992,8 +952,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src4
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src4 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1008,22 +966,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), count (type: bigint)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        null sort order: a
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                        tag: 1
                        value expressions: _col1 (type: bigint)
                        auto parallelism: false
@@ -1036,7 +994,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -1044,8 +1001,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src5
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src5 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1056,7 +1011,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -1064,8 +1018,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src5
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src5 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1083,17 +1035,17 @@ STAGE PLANS:
                condition map:
                     Inner Join 0 to 1
                outputColumnNames: _col0, _col2
-                Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: string), _col2 (type: bigint)
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
                    GlobalTableId: 0
#### A masked pattern was here ####
                    NumFilesPerFileSink: 1
-                    Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -1310,22 +1262,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src2
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), count (type: bigint)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                      File Output Operator
                        compressed: false
                        GlobalTableId: 0
#### A masked pattern was here ####
                        NumFilesPerFileSink: 1
-                        Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -1351,7 +1303,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -1359,8 +1310,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src2
                numFiles 2
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src2 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1371,7 +1320,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -1379,8 +1327,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src2
                numFiles 2
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src2 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1395,22 +1341,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: src3
-                  Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
                  GatherStats: false
                  Filter Operator
                    isSamplingPred: false
                    predicate: (UDFToDouble(key) < 10.0) (type: boolean)
-                    Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: string), count (type: bigint)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE
                      File Output Operator
                        compressed: false
                        GlobalTableId: 0
#### A masked pattern was here ####
                        NumFilesPerFileSink: 1
-                        Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -1436,7 +1382,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -1444,8 +1389,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src3
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src3 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1456,7 +1399,6 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
-                COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,count
                columns.comments
@@ -1464,8 +1406,6 @@ STAGE PLANS:
#### A masked pattern was here ####
                name default.src3
                numFiles 1
-                numRows 309
-                rawDataSize 1482
                serialization.ddl struct src3 { string key, i64 count}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1480,22 +1420,22 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 309 Data
size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: 0 auto parallelism: false Path -> Alias: @@ -1507,7 +1447,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1515,8 +1454,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1527,7 +1464,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1535,8 +1471,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1551,22 +1485,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: 1 auto parallelism: false Path -> Alias: @@ -1578,7 +1512,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1586,8 +1519,6 @@ STAGE PLANS: #### A masked 
pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1598,7 +1529,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1606,8 +1536,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1628,19 +1556,19 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) auto parallelism: false @@ -1652,13 +1580,13 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 56 Data size: 269 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat diff --git a/ql/src/test/results/clientpositive/spark/union27.q.out b/ql/src/test/results/clientpositive/spark/union27.q.out index bccbbb1..2ae1728 100644 --- a/ql/src/test/results/clientpositive/spark/union27.q.out +++ b/ql/src/test/results/clientpositive/spark/union27.q.out @@ -55,54 +55,54 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 250 Data size: 
2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: dim_pho - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Map 4 Map Operator Tree: TableScan alias: jackson_sev_add - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reducer 2 Reduce Operator Tree: @@ -113,14 +113,14 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col1, _col2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/union31.q.out b/ql/src/test/results/clientpositive/spark/union31.q.out index bbe49aa..1c3d40f 100644 --- a/ql/src/test/results/clientpositive/spark/union31.q.out +++ b/ql/src/test/results/clientpositive/spark/union31.q.out @@ -91,101 +91,101 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - 
Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Map 6 Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Map 7 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: 
COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Map 8 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 2 Reduce Operator Tree: @@ -194,14 +194,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -214,14 +214,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -374,43 +374,43 @@ STAGE PLANS: Map Operator 
Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Map 4 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 2 Reduce Operator Tree: @@ -419,30 +419,30 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 3 Reduce Operator Tree: Forward - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(VALUE._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 
35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -453,14 +453,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -473,12 +473,12 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Stage: Stage-0 @@ -668,41 +668,41 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Map 4 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key 
expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Group By Operator @@ -710,37 +710,37 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Forward - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -751,14 +751,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/union32.q.out b/ql/src/test/results/clientpositive/spark/union32.q.out index 1ec7e64..465593d 100644 --- 
a/ql/src/test/results/clientpositive/spark/union32.q.out +++ b/ql/src/test/results/clientpositive/spark/union32.q.out @@ -63,37 +63,37 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(key)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: double) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -170,48 +170,48 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 
Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -222,14 +222,14 @@ STAGE PLANS: condition map: Inner Join 0 to 1 outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -318,14 +318,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -334,50 +334,50 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: 
key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -466,48 +466,48 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not 
null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double), key (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -518,14 +518,14 @@ STAGE PLANS: condition map: Inner Join 0 to 1 outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(UDFToDouble(_col1)) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -614,14 +614,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -630,50 +630,50 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: 
COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Join Operator condition map: Inner Join 0 to 1 outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToDouble(_col1) (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/union_ppr.q.out b/ql/src/test/results/clientpositive/spark/union_ppr.q.out index 1d2120a..5afbf9d 100644 --- a/ql/src/test/results/clientpositive/spark/union_ppr.q.out +++ b/ql/src/test/results/clientpositive/spark/union_ppr.q.out @@ -182,17 +182,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -228,17 +233,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -305,17 +315,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -351,17 +366,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out index cfdfce1..8ace0f7 100644 --- a/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out +++ b/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out @@ -55,22 +55,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_vgby - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14)) outputColumnNames: cint, cdecimal1, cdecimal2 - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), 
min(cdecimal2), sum(cdecimal2), count() keys: cint (type: int) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)), _col9 (type: bigint) Execution mode: vectorized Reducer 2 @@ -81,17 +81,17 @@ STAGE PLANS: keys: KEY._col0 (type: int) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 280 Data size: 63891 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col9 > 1) (type: boolean) - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -160,22 +160,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_vgby - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14)) outputColumnNames: cint, cdecimal1, cdecimal2 - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), avg(cdecimal1), stddev_pop(cdecimal1), stddev_samp(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), avg(cdecimal2), stddev_pop(cdecimal2), stddev_samp(cdecimal2), count() keys: cint (type: int) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: struct<count:bigint,sum:decimal(30,10),input:decimal(20,10)>), _col6 (type: struct<count:bigint,sum:double,variance:double>), _col7 (type: struct<count:bigint,sum:double,variance:double>), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: struct<count:bigint,sum:decimal(33,14),input:decimal(23,14)>), _col13 (type: struct<count:bigint,sum:double,variance:double>), _col14 (type: struct<count:bigint,sum:double,variance:double>), _col15 (type: bigint) Execution mode: vectorized Reducer 2 @@ -185,17 +185,17 @@ STAGE PLANS: keys: KEY._col0 (type: int) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 - Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 280 Data size: 63891 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col15 > 1) (type: boolean) - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: decimal(24,14)), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: decimal(27,18)), _col13 (type: double), _col14 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14 - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out index 7caa50d..64f0ff6 100644 --- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out +++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out @@ -3059,9 +3059,9 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 951 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 951 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: count() mode: hash @@ -3124,11 +3124,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column
stats: NONE + Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ctinyint (type: tinyint) outputColumnNames: ctinyint - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(ctinyint) mode: hash @@ -3191,11 +3191,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cint (type: int) outputColumnNames: cint - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cint) mode: hash @@ -3258,11 +3258,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cfloat (type: float) outputColumnNames: cfloat - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cfloat) mode: hash @@ -3325,11 +3325,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cstring1 (type: string) outputColumnNames: cstring1 - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cstring1) mode: hash @@ -3392,11 +3392,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: alltypesnullorc - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cboolean1 (type: boolean) outputColumnNames: cboolean1 - Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cboolean1) mode: hash diff --git a/ql/src/test/results/clientpositive/stats0.q.out b/ql/src/test/results/clientpositive/stats0.q.out index 99326fc..274f583 100644 --- a/ql/src/test/results/clientpositive/stats0.q.out +++ b/ql/src/test/results/clientpositive/stats0.q.out @@ -59,15 +59,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned @@ -133,15 +138,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned @@ -1379,15 +1389,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned @@ -1462,15 +1477,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned @@ -1493,15 +1513,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned @@ -1517,30 +1542,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned @@ -1562,15 +1597,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned @@ -1586,30 +1626,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.stats_non_partitioned + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct stats_non_partitioned { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.stats_non_partitioned diff --git a/ql/src/test/results/clientpositive/stats1.q.out b/ql/src/test/results/clientpositive/stats1.q.out index 72c53e3..ac076ec 100644 --- a/ql/src/test/results/clientpositive/stats1.q.out +++ b/ql/src/test/results/clientpositive/stats1.q.out @@ -232,6 +232,8 @@ Retention: 0 Table Type: MANAGED_TABLE Table Parameters: numFiles 3 + numRows 26 + rawDataSize 199 totalSize 1583 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/stats11.q.out b/ql/src/test/results/clientpositive/stats11.q.out index 7598dc0..7af3921 100644 --- 
a/ql/src/test/results/clientpositive/stats11.q.out +++ b/ql/src/test/results/clientpositive/stats11.q.out @@ -88,6 +88,8 @@ Table: srcbucket_mapjoin_part #### A masked pattern was here #### Partition Parameters: numFiles 1 + numRows 0 + rawDataSize 0 totalSize 1358 #### A masked pattern was here #### @@ -132,6 +134,8 @@ Table: srcbucket_mapjoin_part #### A masked pattern was here #### Partition Parameters: numFiles 2 + numRows 0 + rawDataSize 0 totalSize 2750 #### A masked pattern was here #### @@ -176,6 +180,8 @@ Table: srcbucket_mapjoin_part #### A masked pattern was here #### Partition Parameters: numFiles 3 + numRows 0 + rawDataSize 0 totalSize 4200 #### A masked pattern was here #### @@ -220,6 +226,8 @@ Table: srcbucket_mapjoin_part #### A masked pattern was here #### Partition Parameters: numFiles 4 + numRows 0 + rawDataSize 0 totalSize 5812 #### A masked pattern was here #### @@ -384,8 +392,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -471,15 +481,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -505,6 +520,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -523,6 +540,8 @@ STAGE PLANS: #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 + numRows 0 + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -552,15 +571,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -583,15 +607,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat 
properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -607,30 +636,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -652,15 +691,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -676,30 +720,40 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe input 
format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value1,value2 columns.comments columns.types string:string:string #### A masked pattern was here #### name default.bucketmapjoin_tmp_result + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucketmapjoin_tmp_result @@ -1001,8 +1055,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.srcbucket_mapjoin_part numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe diff --git a/ql/src/test/results/clientpositive/stats18.q.out b/ql/src/test/results/clientpositive/stats18.q.out index 6971e44..3ad9679 100644 --- a/ql/src/test/results/clientpositive/stats18.q.out +++ b/ql/src/test/results/clientpositive/stats18.q.out @@ -94,6 +94,8 @@ Table: stats_part #### A masked pattern was here #### Partition Parameters: numFiles 2 + numRows 500 + rawDataSize 5312 totalSize 7170 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/stats20.q.out b/ql/src/test/results/clientpositive/stats20.q.out index d7e52b4..a824bc9 100644 --- a/ql/src/test/results/clientpositive/stats20.q.out +++ b/ql/src/test/results/clientpositive/stats20.q.out @@ -55,6 +55,45 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 +PREHOOK: query: describe formatted stats_partitioned partition (ds='1') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats_partitioned +POSTHOOK: query: describe formatted stats_partitioned partition (ds='1') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats_partitioned +# col_name data_type comment + +key string +value string + +# Partition Information +# col_name data_type comment + +ds string + +# Detailed Partition Information +Partition Value: [1] +Database: default +Table: stats_partitioned +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 PREHOOK: query: insert overwrite table stats_partitioned partition (ds='1') select * from src PREHOOK: type: QUERY @@ -104,3 +143,42 @@ Bucket Columns: [] Sort Columns: [] Storage Desc Params: serialization.format 1 +PREHOOK: query: describe formatted stats_partitioned partition (ds='1') +PREHOOK: type: DESCTABLE +PREHOOK: Input: default@stats_partitioned +POSTHOOK: query: describe formatted stats_partitioned partition (ds='1') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: default@stats_partitioned +# col_name data_type comment + +key string +value string + +# Partition Information +# col_name 
data_type comment + +ds string + +# Detailed Partition Information +Partition Value: [1] +Database: default +Table: stats_partitioned +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 1 + numRows 500 + rawDataSize 0 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 diff --git a/ql/src/test/results/clientpositive/stats3.q.out b/ql/src/test/results/clientpositive/stats3.q.out index 0d8cbbd..a51e5df 100644 --- a/ql/src/test/results/clientpositive/stats3.q.out +++ b/ql/src/test/results/clientpositive/stats3.q.out @@ -45,15 +45,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns col1 columns.comments columns.types string #### A masked pattern was here #### name default.hive_test_src + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct hive_test_src { string col1} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.hive_test_src @@ -87,6 +92,8 @@ Retention: 0 Table Type: MANAGED_TABLE Table Parameters: numFiles 1 + numRows 0 + rawDataSize 0 totalSize 11 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/stats5.q.out b/ql/src/test/results/clientpositive/stats5.q.out index 74ddadb..d993f2d 100644 --- a/ql/src/test/results/clientpositive/stats5.q.out +++ b/ql/src/test/results/clientpositive/stats5.q.out @@ -24,7 +24,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: analyze_src - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 5812 Basic stats: COMPLETE Column stats: COMPLETE Stage: Stage-1 Stats-Aggr Operator diff --git a/ql/src/test/results/clientpositive/temp_table.q.out b/ql/src/test/results/clientpositive/temp_table.q.out index 8aedfa1..f61f007 100644 --- a/ql/src/test/results/clientpositive/temp_table.q.out +++ b/ql/src/test/results/clientpositive/temp_table.q.out @@ -242,28 +242,28 @@ STAGE PLANS: Map Operator Tree: TableScan alias: foo - Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - 
Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 2040 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 2040 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -307,45 +307,45 @@ STAGE PLANS: Map Operator Tree: TableScan alias: foo - Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: string) TableScan alias: bar - Statistics: Num rows: 253 Data size: 2703 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2956 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 253 Data size: 2703 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2956 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col1 (type: string) Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 2070 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 2070 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git 
a/ql/src/test/results/clientpositive/temp_table_join1.q.out b/ql/src/test/results/clientpositive/temp_table_join1.q.out index aafc263..ff15a0a 100644 --- a/ql/src/test/results/clientpositive/temp_table_join1.q.out +++ b/ql/src/test/results/clientpositive/temp_table_join1.q.out @@ -44,34 +44,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE TableScan alias: src1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -81,14 +81,14 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -140,34 +140,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE TableScan alias: src2 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -177,14 +177,14 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -238,34 +238,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: 
COMPLETE Column stats: NONE TableScan alias: src1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -275,14 +275,14 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out index bd6db7f..23a2ad8 100644 --- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out +++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_1.q.out @@ -195,8 +195,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -207,6 +209,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -215,11 +218,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -283,8 +290,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 
partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -295,6 +304,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -303,11 +313,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -328,8 +342,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -340,6 +356,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -348,11 +365,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -520,8 +541,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -532,6 +555,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -540,11 +564,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -565,8 +593,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -577,6 +607,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -585,11 +616,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -638,8 +673,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -650,6 +687,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -658,11 +696,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -829,8 +871,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -841,6 +885,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -849,11 +894,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -874,8 +923,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -886,6 +937,7 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -894,11 +946,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -947,8 +1003,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -959,6 +1017,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -967,11 +1026,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out index 04b48a2..ca1cd98 100644 --- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out +++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_11.q.out @@ -191,8 +191,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -203,6 +205,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -210,11 +213,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -278,8 +285,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl 
struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -290,6 +299,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -297,11 +307,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -322,8 +336,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -334,6 +350,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -341,11 +358,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -505,8 +526,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -517,6 +540,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 2
            bucket_field_name key
            columns key,value
@@ -524,11 +548,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -592,8 +620,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -604,6 +634,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -611,11 +642,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -636,8 +671,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -648,6 +685,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -655,11 +693,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -814,8 +856,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -826,6 +870,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 2
            bucket_field_name key
            columns key,value
@@ -833,11 +878,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -897,8 +946,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -909,6 +960,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -916,11 +968,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -941,8 +997,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -953,6 +1011,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -960,11 +1019,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -1134,8 +1197,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1146,6 +1211,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 2
            bucket_field_name key
            columns key,value
@@ -1153,11 +1219,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -1220,8 +1290,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1232,6 +1304,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -1239,11 +1312,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -1264,8 +1341,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1276,6 +1355,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -1283,11 +1363,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -1332,8 +1416,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1344,6 +1430,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -1351,11 +1438,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -1376,8 +1467,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1388,6 +1481,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count 4
            bucket_field_name key
            columns key,value
@@ -1395,11 +1489,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out
index 8664435..b2edddd 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_12.q.out
@@ -257,8 +257,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -269,6 +271,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -277,11 +280,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -329,8 +336,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_medium
            numFiles 3
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_medium { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -341,6 +350,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 3
            bucket_field_name key
@@ -349,11 +359,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_medium
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_medium { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_medium
@@ -431,8 +445,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -443,6 +459,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -451,11 +468,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -476,8 +497,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -488,6 +511,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -496,11 +520,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -541,8 +569,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_medium
            numFiles 3
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_medium { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -553,6 +583,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 3
            bucket_field_name key
@@ -561,11 +592,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_medium
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_medium { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_medium
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out
index a0f2be1..da733a0 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_2.q.out
@@ -191,8 +191,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -203,6 +205,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -211,11 +214,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -236,8 +243,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -248,6 +257,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -256,11 +266,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -309,8 +323,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -321,6 +337,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -329,11 +346,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -502,8 +523,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -514,6 +537,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -522,11 +546,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -547,8 +575,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -559,6 +589,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -567,11 +598,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -620,8 +655,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -632,6 +669,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -640,11 +678,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out
index e6fb5dc..ec54412 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_3.q.out
@@ -175,8 +175,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -187,6 +189,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -195,11 +198,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -220,8 +227,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -232,6 +241,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -240,11 +250,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -309,8 +323,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -321,6 +337,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -329,11 +346,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -500,8 +521,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -512,6 +535,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -520,11 +544,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -572,8 +600,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -584,6 +614,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -592,11 +623,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -617,8 +652,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -629,6 +666,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -637,11 +675,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -809,8 +851,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -821,6 +865,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -829,11 +874,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -881,8 +930,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -893,6 +944,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -901,11 +953,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -926,8 +982,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -938,6 +996,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -946,11 +1005,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out
index b24c2769..5a1682e 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_4.q.out
@@ -191,8 +191,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -203,6 +205,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -211,11 +214,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -236,8 +243,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -248,6 +257,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -256,11 +266,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -325,8 +339,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -337,6 +353,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -345,11 +362,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -516,8 +537,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -528,6 +551,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -536,11 +560,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -588,8 +616,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -600,6 +630,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -608,11 +639,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -633,8 +668,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -645,6 +682,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -653,11 +691,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -825,8 +867,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -837,6 +881,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -845,11 +890,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -897,8 +946,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -909,6 +960,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -917,11 +969,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -942,8 +998,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -954,6 +1012,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -962,11 +1021,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out
index db7b805..99d32c6 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_5.q.out
@@ -151,6 +151,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -170,6 +172,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -230,6 +234,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -249,6 +255,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -386,6 +394,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -405,6 +415,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -465,6 +477,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -484,6 +498,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -647,6 +663,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -666,6 +684,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -716,6 +736,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -735,6 +757,8 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out
index faf20fa..9fbaafe 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_7.q.out
@@ -208,8 +208,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -220,6 +222,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -228,11 +231,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -253,8 +260,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -265,6 +274,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -273,11 +283,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -342,8 +356,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -354,6 +370,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -362,11 +379,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -387,8 +408,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -399,6 +422,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -407,11 +431,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -581,8 +609,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -593,6 +623,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -601,11 +632,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -626,8 +661,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -638,6 +675,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -646,11 +684,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -699,8 +741,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -711,6 +755,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -719,11 +764,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -744,8 +793,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -756,6 +807,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -764,11 +816,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -938,8 +994,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -950,6 +1008,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -958,11 +1017,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -983,8 +1046,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -995,6 +1060,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -1003,11 +1069,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -1056,8 +1126,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1068,6 +1140,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -1076,11 +1149,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -1101,8 +1178,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1113,6 +1192,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -1121,11 +1201,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
diff --git a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out
index 7b51ffc..c3fcb2a 100644
--- a/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out
+++ b/ql/src/test/results/clientpositive/tez/auto_sortmerge_join_8.q.out
@@ -208,8 +208,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -220,6 +222,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -228,11 +231,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -253,8 +260,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -265,6 +274,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -273,11 +283,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_small
@@ -342,8 +356,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -354,6 +370,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -362,11 +379,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -387,8 +408,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -399,6 +422,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -407,11 +431,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -581,8 +609,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -593,6 +623,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -601,11 +632,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -626,8 +661,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_big
            numFiles 4
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -638,6 +675,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 4
            bucket_field_name key
@@ -646,11 +684,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_big
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_big { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+           totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.bucket_big
@@ -699,8 +741,10 @@ STAGE PLANS:
#### A masked pattern was here ####
            name default.bucket_small
            numFiles 2
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string key, string value}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -711,6 +755,7 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+           COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            SORTBUCKETCOLSPREFIX TRUE
            bucket_count 2
            bucket_field_name key
@@ -719,11 +764,15 @@ STAGE PLANS:
            columns.types string:string
#### A masked pattern was here ####
            name default.bucket_small
+           numFiles 0
+           numRows 0
            partition_columns ds
            partition_columns.types string
+           rawDataSize 0
            serialization.ddl struct bucket_small { string
key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -744,8 +793,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -756,6 +807,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -764,11 +816,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -940,8 +996,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -952,6 +1010,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -960,11 +1019,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -985,8 +1048,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_big numFiles 4 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -997,6 +1062,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 4 bucket_field_name key @@ -1005,11 +1071,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_big + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1058,8 +1128,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1070,6 +1142,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1078,11 +1151,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1103,8 +1180,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.bucket_small numFiles 2 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1115,6 +1194,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -1123,11 +1203,15 @@ STAGE PLANS: columns.types string:string #### A masked pattern was here #### name default.bucket_small + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small diff --git a/ql/src/test/results/clientpositive/tez/bucket2.q.out b/ql/src/test/results/clientpositive/tez/bucket2.q.out index 151b0ce..b89b66f 100644 --- a/ql/src/test/results/clientpositive/tez/bucket2.q.out +++ b/ql/src/test/results/clientpositive/tez/bucket2.q.out @@ -134,6 +134,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -141,9 +142,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 @@ -163,6 +168,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -170,9 +176,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 diff --git a/ql/src/test/results/clientpositive/tez/bucket3.q.out b/ql/src/test/results/clientpositive/tez/bucket3.q.out index 3308a8a..bad5e1e 100644 --- a/ql/src/test/results/clientpositive/tez/bucket3.q.out +++ b/ql/src/test/results/clientpositive/tez/bucket3.q.out @@ -139,6 +139,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -146,11 +147,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket3_1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket3_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket3_1 @@ -172,6 +177,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -179,11 +185,15 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket3_1 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct bucket3_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket3_1 diff --git a/ql/src/test/results/clientpositive/tez/bucket4.q.out b/ql/src/test/results/clientpositive/tez/bucket4.q.out index f2030e8..c56c683 100644 --- a/ql/src/test/results/clientpositive/tez/bucket4.q.out +++ b/ql/src/test/results/clientpositive/tez/bucket4.q.out @@ -131,6 +131,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -139,9 +140,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket4_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket4_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket4_1 @@ -161,6 +166,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true"} SORTBUCKETCOLSPREFIX TRUE bucket_count 2 bucket_field_name key @@ -169,9 +175,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket4_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket4_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket4_1 diff --git a/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out b/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out index 46c285e..18314b5 100644 --- a/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out +++ b/ql/src/test/results/clientpositive/tez/column_names_with_leading_and_trailing_spaces.q.out @@ -25,6 +25,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out b/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out index 470590a..82411b6 100644 --- a/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out +++ b/ql/src/test/results/clientpositive/tez/cross_product_check_1.q.out @@ -53,27 +53,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Reducer 2 Reduce Operator Tree: @@ -84,10 +84,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: 
NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -121,50 +121,50 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Map 4 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Map 5 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Reducer 2 Reduce Operator Tree: @@ -175,10 +175,10 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: 
string), _col1 (type: string), _col2 (type: string), _col3 (type: string) Reducer 3 Reduce Operator Tree: @@ -189,10 +189,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -233,49 +233,49 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Map 3 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Map 6 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -285,10 +285,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, 
_col2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -302,27 +302,27 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reducer 5 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Stage: Stage-0 @@ -355,38 +355,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Map 3 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Map 6 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: 
COMPLETE Select Operator - Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -396,10 +396,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -413,27 +413,27 @@ STAGE PLANS: 0 1 outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reducer 5 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Stage: Stage-0 @@ -470,65 +470,65 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: key (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 
Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Map 7 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Reducer 3 Reduce Operator Tree: @@ -539,10 +539,10 @@ STAGE PLANS: 0 1 outputColumnNames: _col0, _col1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -556,27 +556,27 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: 
string) - Statistics: Num rows: 11 Data size: 114 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reducer 6 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 51 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 125 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Stage: Stage-0 diff --git a/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out b/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out index 68df37d..e7bb59f 100644 --- a/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out +++ b/ql/src/test/results/clientpositive/tez/cross_product_check_2.q.out @@ -53,11 +53,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -67,10 +67,10 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -79,14 +79,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string) Stage: Stage-0 @@ -117,14 +117,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: 
_col0, _col1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -134,39 +134,39 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 2 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator sort order: - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string) Map 2 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Map 3 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -176,10 +176,10 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 input vertices: 0 Map 1 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -220,11 +220,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map Join 
Operator condition map: Inner Join 0 to 1 @@ -234,10 +234,10 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 input vertices: 1 Reducer 3 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -246,14 +246,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -263,45 +263,45 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 4 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 47 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Stage: Stage-0 @@ -334,11 +334,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -348,10 +348,10 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2 input vertices: 1 Reducer 3 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 6393 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -360,11 +360,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -374,37 +374,37 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 4 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: COMPLETE Select Operator - Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reduce Output Operator sort order: - Statistics: Num rows: 10 Data size: 40 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column stats: COMPLETE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column 
stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Stage: Stage-0 @@ -440,33 +440,33 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: key (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -476,42 +476,42 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 5 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 105 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: d1 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) 
sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 96 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 106 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -521,10 +521,10 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Reducer 4 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -535,10 +535,10 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 5 Data size: 47 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Stage: Stage-0 diff --git a/ql/src/test/results/clientpositive/tez/ctas.q.out b/ql/src/test/results/clientpositive/tez/ctas.q.out index 52ea981..cef881d 100644 --- a/ql/src/test/results/clientpositive/tez/ctas.q.out +++ b/ql/src/test/results/clientpositive/tez/ctas.q.out @@ -155,10 +155,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### @@ -307,10 +304,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### @@ -459,10 +453,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 120 totalSize 199 #### A masked pattern was here #### @@ -523,10 +514,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 120 totalSize 199 #### A masked pattern was here #### @@ -676,10 +664,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 10 - rawDataSize 96 totalSize 106 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out b/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out index 65f750b..42d6107 100644 --- a/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out +++ 
b/ql/src/test/results/clientpositive/tez/disable_merge_for_bucketing.q.out @@ -130,6 +130,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -137,9 +138,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 @@ -159,6 +164,7 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 2 bucket_field_name key columns key,value @@ -166,9 +172,13 @@ STAGE PLANS: columns.types int:string #### A masked pattern was here #### name default.bucket2_1 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct bucket2_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket2_1 diff --git a/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out index 2626768..49d66a9 100644 --- a/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out +++ b/ql/src/test/results/clientpositive/tez/dynamic_partition_pruning.q.out @@ -234,33 +234,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -357,19 +357,19 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -480,33 +480,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: day(_col0) (type: int) sort order: + Map-reduce partition columns: day(_col0) (type: int) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: day(_col0) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: day(ds) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -603,19 +603,19 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: 
boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: day(_col0) (type: int) sort order: + Map-reduce partition columns: day(_col0) (type: int) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -716,66 +716,66 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Map 6 Map Operator Tree: TableScan alias: srcpart_hour filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: hr (string) Target Input: srcpart Partition key expr: hr - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -895,37 +895,37 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Map 6 Map Operator Tree: TableScan alias: srcpart_hour filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -1052,48 +1052,48 @@ STAGE PLANS: TableScan alias: srcpart_date_hour filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 4 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is 
not null) (type: boolean) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string), hr (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col2 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col2 (type: string) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col2 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: hr (string) Target Input: srcpart Partition key expr: hr - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -1190,19 +1190,19 @@ STAGE PLANS: TableScan alias: srcpart_date_hour filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 4 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string), hr (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col2 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col2 (type: string) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -1311,33 +1311,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = 'I DONT EXIST') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = 'I DONT EXIST') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -1434,19 +1434,19 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = 'I DONT EXIST') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = 'I DONT EXIST') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -1553,33 +1553,33 @@ STAGE PLANS: TableScan alias: srcpart_double_hour filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data 
size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: double) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double) sort order: + Map-reduce partition columns: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: hr (string) Target Input: srcpart Partition key expr: UDFToDouble(hr) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -1676,33 +1676,33 @@ STAGE PLANS: TableScan alias: srcpart_double_hour filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: double) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: double) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: hr (string) Target Input: srcpart Partition key expr: (UDFToDouble(hr) * 2.0) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -1799,19 +1799,19 @@ STAGE PLANS: TableScan 
alias: srcpart_double_hour filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: double) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double) sort order: + Map-reduce partition columns: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -1907,19 +1907,19 @@ STAGE PLANS: TableScan alias: srcpart_double_hour filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: double) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -2028,33 +2028,33 @@ STAGE PLANS: TableScan alias: srcpart_double_hour filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: double) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: UDFToString(_col0) (type: string) sort order: + Map-reduce partition columns: UDFToString(_col0) (type: string) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToString(_col0) (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: hr (string) Target Input: srcpart Partition key expr: UDFToString((UDFToDouble(hr) * 2.0)) - Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -2314,17 +2314,17 @@ STAGE PLANS: TableScan alias: srcpart_date_hour filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0)) (type: boolean) - Statistics: Num rows: 4 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0)) (type: boolean) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string), hr (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator sort order: - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string), _col2 (type: string) Reducer 2 Reduce Operator Tree: @@ -2429,48 +2429,48 @@ STAGE PLANS: TableScan alias: srcpart_date_hour filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 4 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string), hr (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col2 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col2 (type: string) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic 
stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col2 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: hr (string) Target Input: srcpart Partition key expr: hr - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -2569,33 +2569,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -2658,33 +2658,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: (date = '2008-04-08') (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (date = '2008-04-08') (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: 
string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 4 Map 4 Map Operator Tree: @@ -2777,33 +2777,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: (date = '2008-04-08') (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (date = '2008-04-08') (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -2886,52 +2886,52 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Map 6 Map Operator Tree: TableScan alias: srcpart_hour filterExpr: ((UDFToDouble(hour) = 11.0) and (UDFToDouble(hr) = 11.0)) (type: boolean) - Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and (UDFToDouble(hr) = 11.0)) (type: boolean) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -3049,37 +3049,37 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 
Data size: 44 Basic stats: COMPLETE Column stats: NONE Map 6 Map Operator Tree: TableScan alias: srcpart_hour filterExpr: (UDFToDouble(hr) = 13.0) (type: boolean) - Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(hr) = 13.0) (type: boolean) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -3863,33 +3863,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -3997,33 +3997,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select 
Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: day(_col0) (type: int) sort order: + Map-reduce partition columns: day(_col0) (type: int) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: day(_col0) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: day(ds) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -4130,66 +4130,66 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Map 4 Map Operator Tree: TableScan alias: srcpart_hour filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean) - 
Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: hr (string) Target Input: srcpart Partition key expr: hr - Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree: @@ -4298,48 +4298,48 @@ STAGE PLANS: TableScan alias: srcpart_date_hour filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 4 Data size: 108 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string), hr (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col2 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col2 (type: string) - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: _col2 (type: string) 
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Dynamic Partitioning Event Operator
Target column: hr (string)
Target Input: srcpart
Partition key expr: hr
- Statistics: Num rows: 1 Data size: 27 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Target Vertex: Map 1
Reducer 2
Reduce Operator Tree:
@@ -4445,33 +4445,33 @@ STAGE PLANS:
TableScan
alias: srcpart_date
filterExpr: ((date = 'I DONT EXIST') and ds is not null) (type: boolean)
- Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((date = 'I DONT EXIST') and ds is not null) (type: boolean)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ds (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Dynamic Partitioning Event Operator
Target column: ds (string)
Target Input: srcpart
Partition key expr: ds
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Target Vertex: Map 1
Reducer 2
Reduce Operator Tree:
@@ -4553,33 +4553,33 @@ STAGE PLANS:
TableScan
alias: srcpart_double_hour
filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
- Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: hr (type: double)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
sort order: +
Map-reduce partition columns: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: double)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Dynamic Partitioning Event Operator
Target column: hr (string)
Target Input: srcpart
Partition key expr: UDFToDouble(hr)
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Target Vertex: Map 1
Reducer 2
Reduce Operator Tree:
@@ -4672,33 +4672,33 @@ STAGE PLANS:
TableScan
alias: srcpart_double_hour
filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
- Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: hr (type: double)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: double)
sort order: +
Map-reduce partition columns: _col0 (type: double)
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: double)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: double)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Dynamic Partitioning Event Operator
Target column: hr (string)
Target Input: srcpart
Partition key expr: (UDFToDouble(hr) * 2.0)
- Statistics: Num rows: 1 Data size: 7 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 16 Basic stats: COMPLETE Column stats: NONE
Target Vertex: Map 1
Reducer 2
Reduce Operator Tree:
@@ -4951,33 +4951,33 @@ STAGE PLANS:
TableScan
alias: srcpart_date
filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
- Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ds (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Dynamic Partitioning Event Operator
Target column: ds (string)
Target Input: srcpart
Partition key expr: ds
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Target Vertex: Map 1
Reducer 2
Reduce Operator Tree:
@@ -5022,14 +5022,14 @@ STAGE PLANS:
TableScan
alias: srcpart_date
filterExpr: (date = '2008-04-08') (type: boolean)
- Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (date = '2008-04-08') (type: boolean)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ds (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Outer Join0 to 1
@@ -5122,14 +5122,14 @@ STAGE PLANS:
TableScan
alias: srcpart_date
filterExpr: (date = '2008-04-08') (type: boolean)
- Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (date = '2008-04-08') (type: boolean)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ds (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Right Outer Join0 to 1
@@ -5235,52 +5235,52 @@ STAGE PLANS:
TableScan
alias: srcpart_date
filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
- Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ds (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Dynamic Partitioning Event Operator
Target column: ds (string)
Target Input: srcpart
Partition key expr: ds
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Target Vertex: Map 1
Map 4
Map Operator Tree:
TableScan
alias: srcpart_hour
filterExpr: ((UDFToDouble(hour) = 11.0) and (UDFToDouble(hr) = 11.0)) (type: boolean)
- Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((UDFToDouble(hour) = 11.0) and (UDFToDouble(hr) = 11.0)) (type: boolean)
- Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: hr (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Group By Operator
@@ -5364,14 +5364,14 @@ STAGE PLANS:
TableScan
alias: srcpart_date
filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
- Statistics: Num rows: 2 Data size: 42 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ds (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 21 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 44 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
@@ -5407,19 +5407,19 @@ STAGE PLANS:
TableScan
alias: srcpart_hour
filterExpr: (UDFToDouble(hr) = 13.0) (type: boolean)
- Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (UDFToDouble(hr) = 13.0) (type: boolean)
- Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: hr (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Group By Operator
@@ -5735,48 +5735,48 @@ STAGE PLANS:
TableScan
alias: srcpart_date_hour
filterExpr: (((date = '2008-04-08') or (date = '2008-04-09')) and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
- Statistics: Num rows: 4 Data size: 108 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (((date = '2008-04-08') or (date = '2008-04-09')) and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: ds (type: string), hr (type: string)
outputColumnNames: _col0, _col2
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string), UDFToDouble(_col2) (type: double)
sort order: ++
Map-reduce partition columns: _col0 (type: string), UDFToDouble(_col2) (type: double)
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Dynamic Partitioning Event Operator
Target column: ds (string)
Target Input: srcpart_orc
Partition key expr: ds
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Target Vertex: Map 1
Select Operator
expressions: UDFToDouble(_col2) (type: double)
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: double)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Dynamic Partitioning Event Operator
Target column: hr (int)
Target Input: srcpart_orc
Partition key expr: UDFToDouble(hr)
- Statistics: Num rows: 2 Data size: 54 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Target Vertex: Map 1
Reducer 2
Reduce Operator Tree:
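Note: the rows/width figures in the updated plans above and in the empty_join.q.out and explainuser_1.q.out hunks that follow are now derived from the basic stats recorded against the table rather than from raw file sizes alone. A minimal illustrative sketch of that kind of derivation, assuming the parameter names numRows/rawDataSize seen in the golden output (this is not Hive's StatsUtils code):

```java
import java.util.HashMap;
import java.util.Map;

// Sketch only: derive an average row width from basic stats kept in a
// table's parameter map. Parameter names mimic the golden output; the
// method is illustrative, not Hive's actual implementation.
public class WidthEstimateSketch {
    static long avgRowWidth(Map<String, String> params, long fallbackWidth) {
        long numRows = Long.parseLong(params.getOrDefault("numRows", "0"));
        long rawDataSize = Long.parseLong(params.getOrDefault("rawDataSize", "0"));
        if (numRows <= 0 || rawDataSize <= 0) {
            return fallbackWidth; // stats absent or unusable: keep the old guess
        }
        return Math.max(1, rawDataSize / numRows);
    }

    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        params.put("numRows", "500");
        params.put("rawDataSize", "47000");
        System.out.println(avgRowWidth(params, 1)); // prints 94
    }
}
```

Under this assumption, a table whose metadata records 500 rows over 47000 bytes of raw data yields the width of 94 that appears in the explainuser_1.q.out hunks below.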
diff --git a/ql/src/test/results/clientpositive/tez/empty_join.q.out b/ql/src/test/results/clientpositive/tez/empty_join.q.out
index 8a0c040..da51631 100644
--- a/ql/src/test/results/clientpositive/tez/empty_join.q.out
+++ b/ql/src/test/results/clientpositive/tez/empty_join.q.out
@@ -63,7 +63,7 @@ Stage-0
Stage-1
Map 1
File Output Operator [FS_14]
- Map Join Operator [MAPJOIN_22] (rows=2 width=1)
+ Map Join Operator [MAPJOIN_22] (rows=2 width=2)
Conds:SEL_2._col0=RS_10._col0(Left Outer),SEL_2._col0=RS_11._col0(Inner),Output:["_col0","_col1","_col2"]
<-Map 2 [BROADCAST_EDGE]
BROADCAST [RS_10]
@@ -77,17 +77,17 @@ Stage-0
<-Map 3 [BROADCAST_EDGE]
BROADCAST [RS_11]
PartitionCols:_col0
- Select Operator [SEL_8] (rows=1 width=1)
+ Select Operator [SEL_8] (rows=1 width=2)
Output:["_col0"]
- Filter Operator [FIL_21] (rows=1 width=1)
+ Filter Operator [FIL_21] (rows=1 width=2)
predicate:id is not null
- TableScan [TS_6] (rows=1 width=1)
+ TableScan [TS_6] (rows=1 width=2)
default@test_3,t3,Tbl:COMPLETE,Col:NONE,Output:["id"]
- <-Select Operator [SEL_2] (rows=1 width=1)
+ <-Select Operator [SEL_2] (rows=1 width=2)
Output:["_col0"]
- Filter Operator [FIL_19] (rows=1 width=1)
+ Filter Operator [FIL_19] (rows=1 width=2)
predicate:id is not null
- TableScan [TS_0] (rows=1 width=1)
+ TableScan [TS_0] (rows=1 width=2)
default@test_1,t1,Tbl:COMPLETE,Col:NONE,Output:["id"]
PREHOOK: query: SELECT t1.id, t2.id, t3.id
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_1.q.out b/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
index c70f104..57db7cc 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_1.q.out
@@ -186,9 +186,9 @@ Stage-0
SHUFFLE [RS_5]
Group By Operator [GBY_4] (rows=1 width=8)
Output:["_col0"],aggregations:["count(1)"]
- Select Operator [SEL_2] (rows=1 width=2515)
- TableScan [TS_0] (rows=1 width=2515)
- default@src_orc_merge_test_part,src_orc_merge_test_part,Tbl:PARTIAL,Col:NONE
+ Select Operator [SEL_2] (rows=500 width=94)
+ TableScan [TS_0] (rows=500 width=94)
+ default@src_orc_merge_test_part,src_orc_merge_test_part,Tbl:COMPLETE,Col:NONE
PREHOOK: query: explain select sum(hash(key)), sum(hash(value)) from src_orc_merge_test_part where ds='2012-01-03' and ts='2012-01-03+14:46:31'
PREHOOK: type: QUERY
@@ -211,9 +211,9 @@ Stage-0
SHUFFLE [RS_5]
Group By Operator [GBY_4] (rows=1 width=16)
Output:["_col0","_col1"],aggregations:["sum(_col0)","sum(_col1)"]
- Select Operator [SEL_2] (rows=24 width=104)
+ Select Operator [SEL_2] (rows=500 width=94)
Output:["_col0","_col1"]
- TableScan [TS_0] (rows=24 width=104)
+ TableScan [TS_0] (rows=500 width=94)
default@src_orc_merge_test_part,src_orc_merge_test_part,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
PREHOOK: query: drop table src_orc_merge_test_part
@@ -3238,8 +3238,8 @@ Stage-0
Output:["_col0"],aggregations:["count(1)"]
<-Map 1 [SIMPLE_EDGE]
SHUFFLE [RS_3]
- Select Operator [SEL_1] (rows=1 width=171)
- TableScan [TS_0] (rows=1 width=171)
+ Select Operator [SEL_1] (rows=5 width=6)
+ TableScan [TS_0] (rows=5 width=6)
default@tgt_rc_merge_test,tgt_rc_merge_test,Tbl:COMPLETE,Col:COMPLETE
PREHOOK: query: explain select sum(hash(key)), sum(hash(value)) from tgt_rc_merge_test
@@ -3261,9 +3261,9 @@ Stage-0
Output:["_col0","_col1"],aggregations:["sum(VALUE._col0)","sum(VALUE._col1)"]
<-Map 1 [SIMPLE_EDGE]
SHUFFLE [RS_3]
- Select Operator [SEL_1] (rows=1 width=171)
+ Select Operator [SEL_1] (rows=5 width=6)
Output:["_col0","_col1"]
- TableScan [TS_0] (rows=1 width=171)
+ TableScan [TS_0] (rows=5 width=6)
default@tgt_rc_merge_test,tgt_rc_merge_test,Tbl:COMPLETE,Col:NONE,Output:["key","value"]
PREHOOK: query: drop table src_rc_merge_test
diff --git a/ql/src/test/results/clientpositive/tez/filter_join_breaktask.q.out b/ql/src/test/results/clientpositive/tez/filter_join_breaktask.q.out
index 15cad97..890f27b 100644
--- a/ql/src/test/results/clientpositive/tez/filter_join_breaktask.q.out
+++ b/ql/src/test/results/clientpositive/tez/filter_join_breaktask.q.out
@@ -202,17 +202,22 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
bucket_count -1
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.filter_join_breaktask
+ numFiles 0
+ numRows 0
partition_columns ds
partition_columns.types string
+ rawDataSize 0
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.filter_join_breaktask
@@ -275,17 +280,22 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
bucket_count -1
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.filter_join_breaktask
+ numFiles 0
+ numRows 0
partition_columns ds
partition_columns.types string
+ rawDataSize 0
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.filter_join_breaktask
@@ -347,17 +357,22 @@ STAGE PLANS:
input format: org.apache.hadoop.mapred.TextInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
bucket_count -1
columns key,value
columns.comments
columns.types int:string
#### A masked pattern was here ####
name default.filter_join_breaktask
+ numFiles 0
+ numRows 0
partition_columns ds
partition_columns.types string
+ rawDataSize 0
serialization.ddl struct filter_join_breaktask { i32 key, string value}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
name: default.filter_join_breaktask
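Note: the filter_join_breaktask.q.out hunks above show the parameter state left on a freshly created, still-empty table or partition: zeroed basic stats plus the JSON accuracy marker. A sketch of that end state only (key names mirror the golden output; the helper itself is illustrative, not the patch's code):

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative sketch of the parameter state visible in the plan output
// above: a freshly created (empty) table or partition carries zeroed basic
// stats plus a JSON accuracy marker. The helper is hypothetical.
public class CreateTableStatsSketch {
    static final String[] SUPPORTED_STATS = {"numFiles", "numRows", "rawDataSize", "totalSize"};

    static void markEmptyTableStatsAccurate(Map<String, String> params) {
        for (String stat : SUPPORTED_STATS) {
            params.put(stat, "0"); // an empty table genuinely has zero of everything
        }
        params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
    }

    public static void main(String[] args) {
        Map<String, String> params = new LinkedHashMap<>();
        markEmptyTableStatsAccurate(params);
        params.forEach((k, v) -> System.out.println(k + " " + v));
    }
}
```

The point of starting at zero is that "no rows yet" is itself accurate, which is why the golden output can show both the marker and all-zero counts at once.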
diff --git a/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out b/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out
index a8eb0d3..c3e8e00 100644
--- a/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out
+++ b/ql/src/test/results/clientpositive/tez/hybridgrace_hashjoin_1.q.out
@@ -1275,14 +1275,14 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: l
- Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1101 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (cint = 6981) (type: boolean)
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cdecimal1 (type: decimal(20,10))
outputColumnNames: _col0
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
@@ -1292,14 +1292,14 @@
outputColumnNames: _col0, _col2
input vertices:
1 Map 2
- Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 605 Data size: 70215 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: 6981 (type: int), 6981 (type: int), _col0 (type: decimal(20,10)), _col2 (type: decimal(23,14))
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 605 Data size: 70215 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 605 Data size: 70215 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1309,17 +1309,17 @@
Map Operator Tree:
TableScan
alias: l
- Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1101 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (cint = 6981) (type: boolean)
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cdecimal2 (type: decimal(23,14))
outputColumnNames: _col0
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
sort order:
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: decimal(23,14))
Execution mode: vectorized
@@ -1471,14 +1471,14 @@
Map Operator Tree:
TableScan
alias: l
- Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1101 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (cint = 6981) (type: boolean)
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cdecimal1 (type: decimal(20,10))
outputColumnNames: _col0
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
@@ -1488,14 +1488,14 @@
outputColumnNames: _col0, _col2
input vertices:
1 Map 2
- Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 605 Data size: 70215 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: 6981 (type: int), 6981 (type: int), _col0 (type: decimal(20,10)), _col2 (type: decimal(23,14))
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 605 Data size: 70215 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 6758 Data size: 1190783 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 605 Data size: 70215 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1505,17 +1505,17 @@
Map Operator Tree:
TableScan
alias: l
- Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1101 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (cint = 6981) (type: boolean)
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: cdecimal2 (type: decimal(23,14))
outputColumnNames: _col0
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
sort order:
- Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 550 Data size: 63832 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: decimal(23,14))
Execution mode: vectorized
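Note: the llap_nullscan.q.out hunks below go the other way: once the table's contents change outside the normal stats flow, the accuracy marker disappears and the stale numRows/rawDataSize entries drop out of the output. A consumer should therefore gate on the marker before trusting the counts; a dependency-free sketch of that check (a real implementation would parse the JSON properly):

```java
import java.util.HashMap;
import java.util.Map;

// Sketch of the consumer side, under the assumption that stats accuracy is
// tracked as a JSON flag in the parameter map: numRows is trusted only while
// COLUMN_STATS_ACCURATE reports BASIC_STATS true. A substring test keeps the
// sketch short; it is not the actual parsing code.
public class BasicStatsCheckSketch {
    static boolean basicStatsUpToDate(Map<String, String> params) {
        String acc = params.get("COLUMN_STATS_ACCURATE");
        return acc != null && acc.contains("\"BASIC_STATS\":\"true\"");
    }

    public static void main(String[] args) {
        Map<String, String> params = new HashMap<>();
        params.put("numRows", "10");
        System.out.println(basicStatsUpToDate(params)); // false: value present but not trusted
        params.put("COLUMN_STATS_ACCURATE", "{\"BASIC_STATS\":\"true\"}");
        System.out.println(basicStatsUpToDate(params)); // true
    }
}
```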
diff --git a/ql/src/test/results/clientpositive/tez/llap_nullscan.q.out b/ql/src/test/results/clientpositive/tez/llap_nullscan.q.out
index 39f04ea..b8969db 100644
--- a/ql/src/test/results/clientpositive/tez/llap_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/tez/llap_nullscan.q.out
@@ -63,22 +63,22 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
GatherStats: false
Filter Operator
isSamplingPred: false
predicate: false (type: boolean)
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string), ds (type: string), hr (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
GlobalTableId: 0
#### A masked pattern was here ####
NumFilesPerFileSink: 1
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
#### A masked pattern was here ####
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
@@ -105,7 +105,6 @@ STAGE PLANS:
input format: org.apache.hadoop.hive.ql.io.OneNullRowInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
bucket_count -1
columns key,value,ds,hr
columns.comments
@@ -113,8 +112,6 @@ STAGE PLANS:
#### A masked pattern was here ####
name default.src_orc
numFiles 1
- numRows 10
- rawDataSize 3560
serialization.ddl struct src_orc { string key, string value, string ds, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.serde2.NullStructSerDe
@@ -125,7 +122,6 @@ STAGE PLANS:
input format: org.apache.hadoop.hive.ql.io.orc.OrcInputFormat
output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
properties:
- COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
bucket_count -1
columns key,value,ds,hr
columns.comments
@@ -133,8 +129,6 @@ STAGE PLANS:
#### A masked pattern was here ####
name default.src_orc
numFiles 1
- numRows 10
- rawDataSize 3560
serialization.ddl struct src_orc { string key, string value, string ds, string hr}
serialization.format 1
serialization.lib org.apache.hadoop.hive.ql.io.orc.OrcSerde
@@ -183,30 +177,30 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: false (type: boolean)
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 105 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 105 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 105 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
LLAP IO: no inputs
Map 3
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Limit
Number of rows: 0
Statistics: Num rows: 0 Data size: 0 Basic stats: NONE Column stats: NONE
@@ -225,10 +219,10 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 391 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 115 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 391 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 115 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -286,10 +280,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: false (type: boolean)
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 105 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(key)
mode: hash
@@ -305,11 +299,11 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string)
outputColumnNames: key
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 6 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(key)
mode: hash
@@ -392,15 +386,15 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: false (type: boolean)
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: value (type: string)
sort order: +
Map-reduce partition columns: value (type: string)
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
value expressions: key (type: string), ds (type: string), hr (type: string)
Execution mode: vectorized
LLAP IO: no inputs
@@ -408,15 +402,15 @@
Map Operator Tree:
TableScan
alias: s2
- Statistics: Num rows: 10 Data size: 3560 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: false (type: boolean)
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: value (type: string)
sort order: +
Map-reduce partition columns: value (type: string)
- Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 633 Basic stats: COMPLETE Column stats: NONE
value expressions: key (type: string), ds (type: string), hr (type: string)
Execution mode: vectorized
LLAP IO: no inputs
@@ -429,14 +423,14 @@
0 value (type: string)
1 value (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col7, _col8, _col9, _col10
- Statistics: Num rows: 1 Data size: 391 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 696 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string), _col3 (type: string), _col7 (type: string), _col8 (type: string), _col9 (type: string), _col10 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
- Statistics: Num rows: 1 Data size: 391 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 696 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 1 Data size: 391 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 696 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
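Note: with trustworthy input cardinalities (15 rows instead of the old 500-row guess), the joint estimates in the llapdecider.q.out hunks below shrink accordingly, e.g. from 1219 to 36 rows. A textbook join-cardinality estimator is sketched here for orientation only; it is not necessarily the formula Hive's stats annotation rules apply:

```java
// Sketch of a common textbook join-cardinality model:
// |R join S| ~= |R| * |S| / max(ndv(R.key), ndv(S.key)).
// The NDV figures used in main() are assumed values for illustration.
public class JoinCardinalitySketch {
    static long estimateJoinRows(long leftRows, long rightRows, long leftNdv, long rightNdv) {
        long maxNdv = Math.max(Math.max(leftNdv, rightNdv), 1);
        return Math.max(1, (leftRows * rightRows) / maxNdv);
    }

    public static void main(String[] args) {
        // With 15 rows on each side and ~6 distinct join keys, the estimate
        // lands in the same ballpark as the post-join row counts in the plan.
        System.out.println(estimateJoinRows(15, 15, 6, 6)); // prints 37
    }
}
```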
diff --git a/ql/src/test/results/clientpositive/tez/llapdecider.q.out b/ql/src/test/results/clientpositive/tez/llapdecider.q.out
index df90835..9c2e720 100644
--- a/ql/src/test/results/clientpositive/tez/llapdecider.q.out
+++ b/ql/src/test/results/clientpositive/tez/llapdecider.q.out
@@ -104,22 +104,22 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: key, value
- Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(value)
keys: key (type: string)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: bigint)
Reducer 2
Reduce Operator Tree:
@@ -128,21 +128,21 @@ STAGE PLANS:
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1
- Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1400 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col1 (type: bigint)
sort order: +
- Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1400 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string)
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1400 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1400 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -175,22 +175,22 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: key, value
- Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
Group By Operator
aggregations: count(value)
keys: key (type: string)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
value expressions: _col1 (type: bigint)
Execution mode: llap
LLAP IO: no inputs
@@ -202,11 +202,11 @@ STAGE PLANS:
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1
- Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1400 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col1 (type: bigint)
sort order: +
- Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1400 Basic stats: COMPLETE Column stats: NONE
value expressions: _col0 (type: string)
Reducer 3
Execution mode: uber
@@ -214,10 +214,10 @@ STAGE PLANS:
Select Operator
expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1400 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 250 Data size: 44000 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 1400 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -258,22 +258,22 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: key, value
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Group By Operator
aggregations: count(value)
keys: key (type: string)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 7 Data size: 665 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 7 Data size: 665 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: bigint)
Execution mode: llap
LLAP IO: no inputs
@@ -285,11 +285,11 @@ STAGE PLANS:
keys: KEY._col0 (type: string)
mode: mergepartial
outputColumnNames: _col0, _col1
- Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 7 Data size: 665 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col1 (type: bigint)
sort order: +
- Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 7 Data size: 665 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string)
Reducer 3
Execution mode: uber
@@ -297,10 +297,10 @@ STAGE PLANS:
Select Operator
expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: bigint)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 7 Data size: 665 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 205 Data size: 19475 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 7 Data size: 665 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -333,19 +333,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Execution mode: llap
LLAP IO: no inputs
@@ -376,21 +376,21 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 36 Data size: 12816 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col3 (type: string)
sort order: +
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 36 Data size: 12816 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 36 Data size: 12816 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 36 Data size: 12816 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -423,19 +423,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Execution mode: llap
LLAP IO: no inputs
@@ -443,19 +443,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Execution mode: llap
LLAP IO: no inputs
@@ -469,11 +469,11 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col3 (type: string)
sort order: +
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Reducer 3
Execution mode: uber
@@ -481,10 +481,10 @@ STAGE PLANS:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -517,19 +517,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Execution mode: llap
LLAP IO: no inputs
@@ -561,11 +561,11 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 36 Data size: 12816 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col3 (type: string)
sort order: +
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 36 Data size: 12816 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Reducer 3
Execution mode: uber
@@ -573,10 +573,10 @@ STAGE PLANS:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 36 Data size: 12816 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 36 Data size: 12816 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -609,37 +609,37 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Map 4
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Reducer 2
Reduce Operator Tree:
@@ -650,21 +650,21 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col3 (type: string)
sort order: +
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -697,37 +697,37 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Map 4
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Reducer 2
Reduce Operator Tree:
@@ -738,21 +738,21 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col3 (type: string)
sort order: +
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -785,19 +785,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Execution mode: llap
LLAP IO: no inputs
@@ -805,19 +805,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Execution mode: llap
LLAP IO: no inputs
@@ -830,21 +830,21 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col3 (type: string)
sort order: +
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -877,37 +877,37 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Map 4
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Reducer 2
Reduce Operator Tree:
@@ -918,21 +918,21 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col3 (type: string)
sort order: +
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -965,19 +965,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Execution mode: llap
LLAP IO: no inputs
@@ -985,19 +985,19 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: s1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: key (type: string), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 15 Data size: 2670 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col1 (type: string)
Execution mode: llap
LLAP IO: no inputs
@@ -1011,11 +1011,11 @@ STAGE PLANS:
0 _col0 (type: string)
1 _col0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
Reduce Output Operator
key expressions: _col3 (type: string)
sort order: +
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
value expressions: _col0 (type: string), _col1 (type: string), _col2 (type: string)
Reducer 3
Execution mode: llap
@@ -1023,10 +1023,10 @@ STAGE PLANS:
Select Operator
expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), VALUE._col2 (type: string), KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0, _col1, _col2, _col3
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
File Output Operator
compressed: false
- Statistics: Num rows: 1219 Data size: 433964 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 1 Data size: 356 Basic stats: COMPLETE Column stats: COMPLETE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1064,14 +1064,14 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: src_orc
- Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 30 Data size: 2610 Basic stats: COMPLETE Column stats: COMPLETE
Filter Operator
predicate: (UDFToInteger(key) > 1) (type: boolean)
- Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num rows: 10 Data size: 870 Basic stats: COMPLETE Column stats: COMPLETE
Select Operator
expressions: (UDFToInteger(key) + 1) (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+ Statistics: Num
rows: 10 Data size: 870 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(_col0) mode: hash @@ -1125,14 +1125,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_orc - Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 30 Data size: 2610 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (UDFToInteger(key) > 1) (type: boolean) - Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 870 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: (UDFToInteger(GenericUDFTestGetJavaString(key)) + 1) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 870 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(_col0) mode: hash @@ -1183,14 +1183,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_orc - Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 30 Data size: 2610 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (UDFToInteger(GenericUDFTestGetJavaString(key)) > 1) (type: boolean) - Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 870 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: (UDFToInteger(key) + 1) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 870 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(_col0) mode: hash @@ -1241,14 +1241,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src_orc - Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 30 Data size: 2610 Basic stats: COMPLETE Column stats: COMPLETE Filter Operator predicate: (UDFToInteger(key) > 1) (type: boolean) - Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 870 Basic stats: COMPLETE Column stats: COMPLETE Select Operator expressions: (UDFToInteger(GenericUDFTestGetJavaString(key)) + 1) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE + Statistics: Num rows: 10 Data size: 870 Basic stats: COMPLETE Column stats: COMPLETE Group By Operator aggregations: sum(_col0) mode: hash diff --git a/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out b/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out index d03b1cc..ab6a8d3 100644 --- a/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/tez/mapjoin_mapjoin.q.out @@ -163,17 +163,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -209,17 +214,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -255,17 +265,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -301,17 +316,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/tez/metadataonly1.q.out b/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
index 1961d1b..71f6653 100644
--- a/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
+++ b/ql/src/test/results/clientpositive/tez/metadataonly1.q.out
@@ -200,17 +200,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -350,17 +355,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -500,17 +510,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -684,17 +699,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -724,17 +744,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -792,17 +817,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -832,17 +862,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -1082,17 +1117,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1122,17 +1162,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1162,17 +1207,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1329,17 +1379,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1370,17 +1425,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1411,17 +1471,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1567,17 +1632,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -1606,17 +1676,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test1
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct test1 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test1
@@ -1822,17 +1897,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1862,17 +1942,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1902,17 +1987,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1942,17 +2032,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
@@ -1982,17 +2077,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns a,b
 columns.comments
 columns.types int:double
#### A masked pattern was here ####
 name default.test2
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct test2 { i32 a, double b}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.test2
diff --git a/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out b/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
index 336a5e9..a2324e4 100644
--- a/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
+++ b/ql/src/test/results/clientpositive/tez/optimize_nullscan.q.out
@@ -381,17 +381,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -426,17 +431,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -471,17 +481,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -516,17 +531,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -806,17 +826,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -852,17 +877,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -898,17 +928,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -944,17 +979,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -1246,17 +1286,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -1291,17 +1336,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -1336,17 +1386,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -1381,17 +1436,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/tez/sample1.q.out b/ql/src/test/results/clientpositive/tez/sample1.q.out
index 9663218..339e5af 100644
--- a/ql/src/test/results/clientpositive/tez/sample1.q.out
+++ b/ql/src/test/results/clientpositive/tez/sample1.q.out
@@ -93,15 +93,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value,dt,hr
 columns.comments
 columns.types int:string:string:string
#### A masked pattern was here ####
 name default.dest1
+ numFiles 0
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
@@ -142,17 +147,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -172,15 +182,20 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value,dt,hr
 columns.comments
 columns.types int:string:string:string
#### A masked pattern was here ####
 name default.dest1
+ numFiles 0
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct dest1 { i32 key, string value, string dt, string hr}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dest1
diff --git a/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out b/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out
index d396a61..7c664ea 100644
--- a/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out
+++ b/ql/src/test/results/clientpositive/tez/schema_evol_stats.q.out
@@ -75,6 +75,12 @@ Retention: 0
#### A masked pattern was here ####
 Table Type: MANAGED_TABLE
 Table Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+#### A masked pattern was here ####
+ numFiles 0
+ numRows 0
+ rawDataSize 0
+ totalSize 0
#### A masked pattern was here ####
 # Storage Information
@@ -271,6 +277,12 @@ Retention: 0
#### A masked pattern was here ####
 Table Type: MANAGED_TABLE
 Table Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
+#### A masked pattern was here ####
+ numFiles 0
+ numRows 0
+ rawDataSize 0
+ totalSize 0
#### A masked pattern was here ####
 # Storage Information
diff --git a/ql/src/test/results/clientpositive/tez/stats_noscan_1.q.out b/ql/src/test/results/clientpositive/tez/stats_noscan_1.q.out
index 561b263..2e15f40 100644
--- a/ql/src/test/results/clientpositive/tez/stats_noscan_1.q.out
+++ b/ql/src/test/results/clientpositive/tez/stats_noscan_1.q.out
@@ -103,6 +103,8 @@ Table: analyze_srcpart
 Partition Parameters:
 COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
+ numRows 0
+ rawDataSize 0
 totalSize 5812
#### A masked pattern was here ####
@@ -141,6 +143,8 @@ Table: analyze_srcpart
 Partition Parameters:
 COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
+ numRows 0
+ rawDataSize 0
 totalSize 5812
#### A masked pattern was here ####
@@ -177,7 +181,10 @@ Database: default
 Table: analyze_srcpart
#### A masked pattern was here ####
 Partition Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
+ numRows 0
+ rawDataSize 0
 totalSize 5812
#### A masked pattern was here ####
@@ -214,7 +221,10 @@ Database: default
 Table: analyze_srcpart
#### A masked pattern was here ####
 Partition Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
+ numRows 0
+ rawDataSize 0
 totalSize 5812
#### A masked pattern was here ####
@@ -365,6 +375,8 @@ Table: analyze_srcpart_partial
 Partition Parameters:
 COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
+ numRows 0
+ rawDataSize 0
 totalSize 5812
#### A masked pattern was here ####
@@ -403,6 +415,8 @@ Table: analyze_srcpart_partial
 Partition Parameters:
 COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
+ numRows 0
+ rawDataSize 0
 totalSize 5812
#### A masked pattern was here ####
@@ -439,7 +453,10 @@ Database: default
 Table: analyze_srcpart_partial
#### A masked pattern was here ####
 Partition Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
+ numRows 0
+ rawDataSize 0
 totalSize 5812
#### A masked pattern was here ####
@@ -476,7 +493,10 @@ Database: default
 Table: analyze_srcpart_partial
#### A masked pattern was here ####
 Partition Parameters:
+ COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"}
 numFiles 1
+ numRows 0
+ rawDataSize 0
 totalSize 5812
#### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/tez/temp_table.q.out b/ql/src/test/results/clientpositive/tez/temp_table.q.out
index 1c10a4e..1b1a32f 100644
--- a/ql/src/test/results/clientpositive/tez/temp_table.q.out
+++ b/ql/src/test/results/clientpositive/tez/temp_table.q.out
@@ -174,15 +174,15 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: foo
- Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: string), value (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: string)
 sort order: +
- Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE
 TopN Hash Memory Usage: 0.1
 value expressions: _col1 (type: string)
 Reducer 2
@@ -190,13 +190,13 @@ STAGE PLANS:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE
 Limit
 Number of rows: 10
- Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 10 Data size: 2040 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 10 Data size: 2040 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -248,30 +248,30 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: foo
- Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: string), value (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 247 Data size: 2609 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 2856 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: string)
 sort order: +
- Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
 TopN Hash Memory Usage: 0.1
 value expressions: _col1 (type: string)
 Map 4
 Map Operator Tree:
 TableScan
 alias: bar
- Statistics: Num rows: 253 Data size: 2703 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 2956 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: key (type: string), value (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 253 Data size: 2703 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 14 Data size: 2956 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
 key expressions: _col0 (type: string)
 sort order: +
- Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
 TopN Hash Memory Usage: 0.1
 value expressions: _col1 (type: string)
 Reducer 3
@@ -279,13 +279,13 @@ STAGE PLANS:
 Select Operator
 expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 5812 Basic stats: COMPLETE Column stats: NONE
 Limit
 Number of rows: 10
- Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 10 Data size: 2070 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 10 Data size: 100 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 10 Data size: 2070 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/tez_dml.q.out b/ql/src/test/results/clientpositive/tez/tez_dml.q.out
index 43996cd..27dcc76 100644
--- a/ql/src/test/results/clientpositive/tez/tez_dml.q.out
+++ b/ql/src/test/results/clientpositive/tez/tez_dml.q.out
@@ -452,14 +452,14 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: tmp_src
- Statistics: Num rows: 309 Data size: 2718 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 3027 Basic stats: COMPLETE Column stats: NONE
 Select Operator
 expressions: value (type: string), cnt (type: bigint)
 outputColumnNames: _col0, _col1
- Statistics: Num rows: 309 Data size: 2718 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 3027 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
 compressed: false
- Statistics: Num rows: 309 Data size: 2718 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 3027 Basic stats: COMPLETE Column stats: NONE
 table:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out b/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out
index 180bcc6..c34bc54 100644
--- a/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out
+++ b/ql/src/test/results/clientpositive/tez/tez_join_result_complex.q.out
@@ -471,6 +471,8 @@ STAGE PLANS:
#### A masked pattern was here ####
 name default.ct_events_clean
 numFiles 1
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct ct_events_clean { string contact_event_id, string ce_create_dt, string ce_end_dt, string contact_type, string cnctevs_cd, string contact_mode, string cntvnst_stts_cd, i32 total_transfers, list ce_notes}
 serialization.format
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -492,6 +494,8 @@ STAGE PLANS:
#### A masked pattern was here ####
 name default.ct_events_clean
 numFiles 1
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct ct_events_clean { string contact_event_id, string ce_create_dt, string ce_end_dt, string contact_type, string cnctevs_cd, string contact_mode, string cntvnst_stts_cd, i32 total_transfers, list ce_notes}
 serialization.format
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -573,6 +577,8 @@ STAGE PLANS:
#### A masked pattern was here ####
 name default.service_request_clean
 numFiles 1
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct service_request_clean { string cnctevn_id, string svcrqst_id, string svcrqst_crt_dts, i32 subject_seq_no, string plan_component, string cust_segment, string cnctyp_cd, string cnctmd_cd, string cnctevs_cd, string svcrtyp_cd, string svrstyp_cd, string cmpltyp_cd, string catsrsn_cd, string apealvl_cd, string cnstnty_cd, string svcrqst_asrqst_ind, string svcrqst_rtnorig_in, string svcrqst_vwasof_dt, string sum_reason_cd, string sum_reason, string crsr_master_claim_index, list svcrqct_cds, string svcrqst_lupdt, timestamp crsr_lupdt, string cntevsds_lupdt, i32 ignore_me, list notes}
 serialization.format
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -594,6 +600,8 @@ STAGE PLANS:
#### A masked pattern was here ####
 name default.service_request_clean
 numFiles 1
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct service_request_clean { string cnctevn_id, string svcrqst_id, string svcrqst_crt_dts, i32 subject_seq_no, string plan_component, string cust_segment, string cnctyp_cd, string cnctmd_cd, string cnctevs_cd, string svcrtyp_cd, string svrstyp_cd, string cmpltyp_cd, string catsrsn_cd, string apealvl_cd, string cnstnty_cd, string svcrqst_asrqst_ind, string svcrqst_rtnorig_in, string svcrqst_vwasof_dt, string sum_reason_cd, string sum_reason, string crsr_master_claim_index, list svcrqct_cds, string svcrqst_lupdt, timestamp crsr_lupdt, string cntevsds_lupdt, i32 ignore_me, list notes}
 serialization.format
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1708,6 +1716,8 @@ STAGE PLANS:
#### A masked pattern was here ####
 name default.ct_events_clean
 numFiles 1
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct ct_events_clean { string contact_event_id, string ce_create_dt, string ce_end_dt, string contact_type, string cnctevs_cd, string contact_mode, string cntvnst_stts_cd, i32 total_transfers, list ce_notes}
 serialization.format
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1729,6 +1739,8 @@ STAGE PLANS:
#### A masked pattern was here ####
 name default.ct_events_clean
 numFiles 1
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct ct_events_clean { string contact_event_id, string ce_create_dt, string ce_end_dt, string contact_type, string cnctevs_cd, string contact_mode, string cntvnst_stts_cd, i32 total_transfers, list ce_notes}
 serialization.format
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1811,6 +1823,8 @@ STAGE PLANS:
#### A masked pattern was here ####
 name default.service_request_clean
 numFiles 1
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct service_request_clean { string cnctevn_id, string svcrqst_id, string svcrqst_crt_dts, i32 subject_seq_no, string plan_component, string cust_segment, string cnctyp_cd, string cnctmd_cd, string cnctevs_cd, string svcrtyp_cd, string svrstyp_cd, string cmpltyp_cd, string catsrsn_cd, string apealvl_cd, string cnstnty_cd, string svcrqst_asrqst_ind, string svcrqst_rtnorig_in, string svcrqst_vwasof_dt, string sum_reason_cd, string sum_reason, string crsr_master_claim_index, list svcrqct_cds, string svcrqst_lupdt, timestamp crsr_lupdt, string cntevsds_lupdt, i32 ignore_me, list notes}
 serialization.format
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
@@ -1832,6 +1846,8 @@
#### A masked pattern was here ####
 name default.service_request_clean
 numFiles 1
+ numRows 0
+ rawDataSize 0
 serialization.ddl struct service_request_clean { string cnctevn_id, string svcrqst_id, string svcrqst_crt_dts, i32 subject_seq_no, string plan_component, string cust_segment, string cnctyp_cd, string cnctmd_cd, string cnctevs_cd, string svcrtyp_cd, string svrstyp_cd, string cmpltyp_cd, string catsrsn_cd, string apealvl_cd, string cnstnty_cd, string svcrqst_asrqst_ind, string svcrqst_rtnorig_in, string svcrqst_vwasof_dt, string sum_reason_cd, string sum_reason, string crsr_master_claim_index, list svcrqct_cds, string svcrqst_lupdt, timestamp crsr_lupdt, string cntevsds_lupdt, i32 ignore_me, list notes}
 serialization.format
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
diff --git a/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out b/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
index 92d1f4c..a9f87c6 100644
--- a/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
+++ b/ql/src/test/results/clientpositive/tez/transform_ppr1.q.out
@@ -177,17 +177,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -223,17 +228,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -269,17 +279,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -315,17 +330,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out b/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
index c131bbe..a435e0c 100644
--- a/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
+++ b/ql/src/test/results/clientpositive/tez/transform_ppr2.q.out
@@ -179,17 +179,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
@@ -225,17 +230,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns key,value
 columns.comments 'default','default'
 columns.types string:string
#### A masked pattern was here ####
 name default.srcpart
+ numFiles 0
+ numRows 0
 partition_columns ds/hr
 partition_columns.types string:string
+ rawDataSize 0
 serialization.ddl struct srcpart { string key, string value}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/tez/unionDistinct_1.q.out b/ql/src/test/results/clientpositive/tez/unionDistinct_1.q.out
index bef2365..b77a93a 100644
--- a/ql/src/test/results/clientpositive/tez/unionDistinct_1.q.out
+++ b/ql/src/test/results/clientpositive/tez/unionDistinct_1.q.out
@@ -6683,17 +6683,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns k0,k1,k2,k3,k4,k5
 columns.comments
 columns.types string:string:string:string:string:string
#### A masked pattern was here ####
 name default.dst_union22_delta
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dst_union22_delta
@@ -6782,17 +6787,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns k1,k2,k3,k4
 columns.comments
 columns.types string:string:string:string
#### A masked pattern was here ####
 name default.dst_union22
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dst_union22
@@ -6855,17 +6865,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns k0,k1,k2,k3,k4,k5
 columns.comments
 columns.types string:string:string:string:string:string
#### A masked pattern was here ####
 name default.dst_union22_delta
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dst_union22_delta
@@ -6892,17 +6907,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns k1,k2,k3,k4
 columns.comments
 columns.types string:string:string:string
#### A masked pattern was here ####
 name default.dst_union22
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dst_union22
@@ -6926,17 +6946,22 @@ STAGE PLANS:
 input format: org.apache.hadoop.mapred.TextInputFormat
 output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
 properties:
+ COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
 bucket_count -1
 columns k1,k2,k3,k4
 columns.comments
 columns.types string:string:string:string
#### A masked pattern was here ####
 name default.dst_union22
+ numFiles 0
+ numRows 0
 partition_columns ds
 partition_columns.types string
+ rawDataSize 0
 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4}
 serialization.format 1
 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+ totalSize 0
#### A masked pattern was here ####
 serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 name: default.dst_union22
@@ -8008,27 +8033,27 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
 alias: src2
- Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE
 GatherStats: false
 Filter Operator
Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Path -> Alias: @@ -8040,7 +8065,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8048,8 +8072,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8060,7 +8082,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8068,8 +8089,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8084,28 +8103,28 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src5 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) auto parallelism: true @@ -8118,7 +8137,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8126,8 +8144,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8138,7 +8154,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8146,8 +8161,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8162,27 +8175,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Path -> Alias: @@ -8194,7 +8207,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8202,8 +8214,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8214,7 +8224,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count 
-1 columns key,count columns.comments @@ -8222,8 +8231,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8238,27 +8245,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src4 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Path -> Alias: @@ -8270,7 +8277,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8278,8 +8284,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8290,7 +8294,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8298,8 +8301,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8318,18 +8319,18 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 51 Data size: 244 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 210 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort 
order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Reducer 3 @@ -8339,18 +8340,18 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Reducer 5 @@ -8360,18 +8361,18 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Reducer 7 @@ -8381,13 +8382,13 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 77 Data size: 369 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 329 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 77 Data size: 369 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 329 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -8626,27 +8627,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src2 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count 
(type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Path -> Alias: @@ -8658,7 +8659,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8666,8 +8666,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8678,7 +8676,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8686,8 +8683,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8702,27 +8697,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Path -> Alias: @@ -8734,7 +8729,6 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8742,8 +8736,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8754,7 +8746,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8762,8 +8753,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8778,20 +8767,20 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 - Estimated key counts: Map 8 => 103 + Estimated key counts: Map 8 => 5 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -8799,23 +8788,23 @@ STAGE PLANS: input vertices: 1 Map 8 Position of Big Table: 0 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: string), _col2 (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Path -> Alias: @@ -8827,7 +8816,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8835,8 +8823,6 @@ STAGE PLANS: #### A masked 
pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8847,7 +8833,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8855,8 +8840,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8871,22 +8854,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: bigint) auto parallelism: true @@ -8899,7 +8882,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8907,8 +8889,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8919,7 +8899,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8927,8 +8906,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8946,18 +8923,18 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Reducer 5 @@ -8967,13 +8944,13 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 108 Data size: 518 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 568 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 108 Data size: 518 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 568 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -9209,27 +9186,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src2 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Path -> Alias: @@ -9241,7 +9218,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9249,8 +9225,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9261,7 +9235,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9269,8 +9242,6 @@ STAGE PLANS: #### A masked 
pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9285,27 +9256,27 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Path -> Alias: @@ -9317,7 +9288,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9325,8 +9295,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9337,7 +9305,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9345,8 +9312,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9361,20 +9326,20 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE 
Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 - Estimated key counts: Map 9 => 103 + Estimated key counts: Map 9 => 5 keys: 0 _col0 (type: string) 1 _col0 (type: string) @@ -9382,20 +9347,20 @@ STAGE PLANS: input vertices: 1 Map 9 Position of Big Table: 0 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) auto parallelism: true @@ -9408,7 +9373,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9416,8 +9380,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9428,7 +9390,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9436,8 +9397,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9452,22 +9411,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: 1 auto parallelism: true Path -> Alias: @@ -9479,7 +9438,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9487,8 +9445,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9499,7 +9455,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9507,8 +9462,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9526,18 +9479,18 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Reducer 5 @@ -9547,13 +9500,13 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 79 Data size: 379 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 338 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 79 Data size: 379 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 338 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -9578,18 +9531,18 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 56 Data size: 269 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: true Union 2 @@ 
-11190,70 +11143,70 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Map 2 Map Operator Tree: TableScan alias: dim_pho - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: jackson_sev_add - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - 
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Reducer 4 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -11263,15 +11216,15 @@ STAGE PLANS: outputColumnNames: _col1, _col2 input vertices: 0 Map 1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12548,64 +12501,64 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -12614,18 +12567,18 @@ STAGE PLANS: Select Operator expressions: _col1 (type: string) outputColumnNames: _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 4 Reduce Operator Tree: @@ -12634,14 +12587,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -12802,43 +12755,43 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column 
stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Map 6 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 2 Reduce Operator Tree: @@ -12847,47 +12800,47 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reducer 4 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reducer 5 Reduce Operator Tree: Forward - Statistics: Num rows: 5 Data size: 35 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(VALUE._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -12898,14 +12851,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -12918,17 +12871,17 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Union 3 Vertex: Union 3 @@ -13126,42 +13079,42 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE 
Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Map 6 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), cnt (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Group By Operator @@ -13169,50 +13122,50 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reducer 4 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Reducer 5 Reduce Operator Tree: Forward - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num 
rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -13223,14 +13176,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -13419,60 +13372,60 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(key)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 20 Data size: 140 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: double) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reducer 4 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13540,14 +13493,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -13557,68 +13510,68 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 4 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator 
expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: double) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13686,33 +13639,33 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 
Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -13722,49 +13675,49 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 5 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + Map-reduce partition columns: _col0 (type: double) - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: double) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13832,14 +13785,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 
Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -13849,68 +13802,68 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 4 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(UDFToDouble(_col1)) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: double), _col1 (type: string) - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double), key (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: double), _col1 (type: string) - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: double), KEY._col1 (type: string) 
mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13984,33 +13937,33 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double), _col1 (type: double) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: double), _col1 (type: double) - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -14020,49 +13973,49 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 5 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToDouble(_col1) (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double), _col1 (type: double) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double), _col1 (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: double), _col1 (type: double) - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: double), KEY._col1 (type: double) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/union_fast_stats.q.out b/ql/src/test/results/clientpositive/tez/union_fast_stats.q.out index 578205e..c767d94 100644 --- a/ql/src/test/results/clientpositive/tez/union_fast_stats.q.out +++ b/ql/src/test/results/clientpositive/tez/union_fast_stats.q.out @@ -176,10 +176,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 4 - numRows 0 - rawDataSize 0 totalSize 4003 #### A masked pattern was here #### @@ -509,10 +506,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} numFiles 1 - numRows 5 - rawDataSize 1069 totalSize 3224 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/tez/vector_between_columns.q.out b/ql/src/test/results/clientpositive/tez/vector_between_columns.q.out index d8f9c8b..2585910 100644 --- a/ql/src/test/results/clientpositive/tez/vector_between_columns.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_between_columns.q.out @@ -64,7 +64,7 @@ POSTHOOK: Output: default@TINT POSTHOOK: Lineage: tint.cint SIMPLE [(tint_txt)tint_txt.FieldSchema(name:cint, type:int, comment:null), ] POSTHOOK: Lineage: tint.rnum SIMPLE [(tint_txt)tint_txt.FieldSchema(name:rnum, type:int, comment:null), ] tint_txt.rnum tint_txt.cint -Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 1' is a cross product +Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 2' is a cross product PREHOOK: query: -- We DO NOT expect the following to be vectorized because the BETWEEN range expressions -- are not constants. We currently do not support the range expressions being columns. 
explain @@ -85,18 +85,32 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Map 1 <- Map 2 (BROADCAST_EDGE) + Map 2 <- Map 1 (BROADCAST_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: tint - Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 269 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: rnum (type: int), cint (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 269 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + sort order: + Statistics: Num rows: 33 Data size: 269 Basic stats: COMPLETE Column stats: NONE + value expressions: _col0 (type: int), _col1 (type: int) + Execution mode: vectorized + Map 2 + Map Operator Tree: + TableScan + alias: tsint + Statistics: Num rows: 33 Data size: 271 Basic stats: COMPLETE Column stats: NONE + Select Operator + expressions: rnum (type: int), csint (type: smallint) + outputColumnNames: _col0, _col1 + Statistics: Num rows: 33 Data size: 271 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -105,36 +119,22 @@ STAGE PLANS: 1 outputColumnNames: _col0, _col1, _col2, _col3 input vertices: - 1 Map 2 - Statistics: Num rows: 5 Data size: 39 Basic stats: COMPLETE Column stats: NONE + 0 Map 1 + Statistics: Num rows: 36 Data size: 295 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: _col1 BETWEEN _col3 AND _col3 (type: boolean) - Statistics: Num rows: 2 Data size: 15 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 147 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), _col2 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 15 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 147 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 15 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 18 Data size: 147 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Map 2 - Map Operator Tree: - TableScan - alias: tsint - Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE - Select Operator - expressions: rnum (type: int), csint (type: smallint) - outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - sort order: - Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE - value expressions: _col0 (type: int), _col1 (type: smallint) - Execution mode: vectorized Stage: Stage-0 Fetch Operator @@ -142,7 +142,7 @@ STAGE PLANS: Processor Tree: ListSink -Warning: Map Join MAPJOIN[11][bigTable=?] in task 'Map 1' is a cross product +Warning: Map Join MAPJOIN[11][bigTable=?] 
in task 'Map 2' is a cross product PREHOOK: query: select tint.rnum, tsint.rnum from tint , tsint where tint.cint between tsint.csint and tsint.csint PREHOOK: type: QUERY PREHOOK: Input: default@tint diff --git a/ql/src/test/results/clientpositive/tez/vector_between_in.q.out b/ql/src/test/results/clientpositive/tez/vector_between_in.q.out index a4cf61a..8d0673b 100644 --- a/ql/src/test/results/clientpositive/tez/vector_between_in.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_between_in.q.out @@ -32,18 +32,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (cdate) IN (1969-10-26, 1969-07-14) (type: boolean) - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 966 Data size: 54096 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cdate (type: date) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 966 Data size: 54096 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: date) sort order: + - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 966 Data size: 54096 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -51,10 +51,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: date) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 966 Data size: 54096 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 966 Data size: 54096 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -86,12 +86,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (not (cdate) IN (1969-10-26, 1969-07-14, 1970-01-21)) (type: boolean) - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -144,18 +144,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (cdecimal1) IN (2365.8945945946, 881.0135135135, -3367.6517567568) (type: boolean) - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 483 Data size: 54124 Basic stats: COMPLETE 
Column stats: NONE Select Operator expressions: cdecimal1 (type: decimal(20,10)) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 483 Data size: 54124 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(20,10)) sort order: + - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 483 Data size: 54124 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -163,10 +163,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: decimal(20,10)) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 483 Data size: 54124 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 483 Data size: 54124 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -198,12 +198,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (not (cdecimal1) IN (2365.8945945946, 881.0135135135, -3367.6517567568)) (type: boolean) - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -256,18 +256,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: cdate BETWEEN 1969-12-30 AND 1970-01-02 (type: boolean) - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cdate (type: date) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: date) sort order: + - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -275,10 +275,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: date) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -310,18 +310,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: cdate NOT BETWEEN 1968-05-01 AND 1971-09-01 (type: boolean) - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cdate (type: date) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: date) sort order: + - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -329,10 +329,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: date) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -364,18 +364,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: cdecimal1 BETWEEN -20 AND 45.9918918919 (type: boolean) - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cdecimal1 (type: decimal(20,10)) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(20,10)) sort order: + - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -383,10 +383,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: decimal(20,10)) outputColumnNames: _col0 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -418,12 +418,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 (type: boolean) - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE Select Operator - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() mode: hash @@ -725,22 +725,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: (cdate) IN (1969-10-26, 1969-07-14) (type: boolean) outputColumnNames: _col0 - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: boolean) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + Map-reduce partition columns: _col0 (type: boolean) - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized Reducer 2 @@ -751,10 +751,10 @@ STAGE PLANS: keys: KEY._col0 (type: boolean) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -786,22 +786,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: (cdecimal1) IN (2365.8945945946, 881.0135135135, -3367.6517567568) (type: boolean) outputColumnNames: _col0 - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: boolean) mode: hash outputColumnNames: _col0, _col1 - 
Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + Map-reduce partition columns: _col0 (type: boolean) - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized Reducer 2 @@ -812,10 +812,10 @@ STAGE PLANS: keys: KEY._col0 (type: boolean) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -847,22 +847,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cdate BETWEEN 1969-12-30 AND 1970-01-02 (type: boolean) outputColumnNames: _col0 - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: boolean) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + Map-reduce partition columns: _col0 (type: boolean) - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1931 Data size: 108136 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized Reducer 2 @@ -873,10 +873,10 @@ STAGE PLANS: keys: KEY._col0 (type: boolean) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 54040 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -908,22 +908,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_date_test - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 (type: boolean) outputColumnNames: _col0 - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: 
NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: boolean) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: boolean) sort order: + Map-reduce partition columns: _col0 (type: boolean) - Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 965 Data size: 108136 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized Reducer 2 @@ -934,10 +934,10 @@ STAGE PLANS: keys: KEY._col0 (type: boolean) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 482 Data size: 54011 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vector_char_mapjoin1.q.out b/ql/src/test/results/clientpositive/tez/vector_char_mapjoin1.q.out index 4ee8150..18101e0 100644 --- a/ql/src/test/results/clientpositive/tez/vector_char_mapjoin1.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_char_mapjoin1.q.out @@ -147,14 +147,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: char(10)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -164,31 +164,31 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 3 - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: char(10)) Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic 
stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c1 (type: int), c2 (type: char(10))
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col1 (type: char(10))
                         sort order: +
                         Map-reduce partition columns: _col1 (type: char(10))
-                        Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: int)
             Execution mode: vectorized
         Reducer 2
@@ -197,10 +197,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(10)), VALUE._col1 (type: int), VALUE._col2 (type: char(10))
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -244,41 +244,22 @@ STAGE PLANS:
     Tez
#### A masked pattern was here ####
       Edges:
-        Map 2 <- Map 1 (BROADCAST_EDGE)
-        Reducer 3 <- Map 2 (SIMPLE_EDGE)
+        Map 1 <- Map 3 (BROADCAST_EDGE)
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
#### A masked pattern was here ####
       Vertices:
         Map 1
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: c2 is not null (type: boolean)
-                    Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c1 (type: int), c2 (type: char(10))
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
-                      Reduce Output Operator
-                        key expressions: _col1 (type: char(20))
-                        sort order: +
-                        Map-reduce partition columns: _col1 (type: char(20))
-                        Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
-                        value expressions: _col0 (type: int)
-            Execution mode: vectorized
-        Map 2
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: c2 is not null (type: boolean)
-                    Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
-                    Select Operator
-                      expressions: c1 (type: int), c2 (type: char(20))
-                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
@@ -287,25 +268,44 @@ STAGE PLANS:
                           1 _col1 (type: char(20))
                         outputColumnNames: _col0, _col1, _col2, _col3
                         input vertices:
-                          0 Map 1
-                        Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                          1 Map 3
+                        Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                         HybridGraceHashJoin: true
                         Reduce Output Operator
                           key expressions: _col0 (type: int)
                           sort order: +
-                          Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: char(20))
             Execution mode: vectorized
-        Reducer 3
+        Map 3
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 13 Data size: 324 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: c2 is not null (type: boolean)
+                    Statistics: Num rows: 13 Data size: 324 Basic stats: COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: c1 (type: int), c2 (type: char(20))
+                      outputColumnNames: _col0, _col1
+                      Statistics: Num rows: 13 Data size: 324 Basic stats: COMPLETE Column stats: NONE
+                      Reduce Output Operator
+                        key expressions: _col1 (type: char(20))
+                        sort order: +
+                        Map-reduce partition columns: _col1 (type: char(20))
+                        Statistics: Num rows: 13 Data size: 324 Basic stats: COMPLETE Column stats: NONE
+                        value expressions: _col0 (type: int)
+            Execution mode: vectorized
+        Reducer 2
             Execution mode: vectorized
             Reduce Operator Tree:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(10)), VALUE._col1 (type: int), VALUE._col2 (type: char(20))
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -359,14 +359,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: c2 is not null (type: boolean)
-                    Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c1 (type: int), c2 (type: char(10))
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
@@ -376,31 +376,31 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1, _col2, _col3
                         input vertices:
                           1 Map 3
-                        Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                         HybridGraceHashJoin: true
                         Reduce Output Operator
                           key expressions: _col0 (type: int)
                           sort order: +
-                          Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: string)
             Execution mode: vectorized
         Map 3
             Map Operator Tree:
                 TableScan
                   alias: b
-                  Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: c2 is not null (type: boolean)
-                    Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c1 (type: int), c2 (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col1 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col1 (type: string)
-                        Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: int)
             Execution mode: vectorized
         Reducer 2
@@ -409,10 +409,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(10)), VALUE._col1 (type: int), VALUE._col2 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out b/ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out
index 7e6638e..c1c41b0 100644
--- a/ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_decimal_10_0.q.out
@@ -55,15 +55,15 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: decimal
-                  Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: dec (type: decimal(10,0))
                     outputColumnNames: _col0
-                    Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: decimal(10,0))
                       sort order: +
-                      Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
             Execution mode: vectorized
@@ -71,10 +71,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: decimal(10,0))
                 outputColumnNames: _col0
-                Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
index efc77ff..4729a88 100644
--- a/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_decimal_aggregate.q.out
@@ -56,22 +56,22 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: decimal_vgby
-                  Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14))
                     outputColumnNames: cint, cdecimal1, cdecimal2
-                    Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count()
                       keys: cint (type: int)
                       mode: hash
                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-                      Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)), _col9 (type: bigint)
             Execution mode: vectorized
         Reducer 2
@@ -82,17 +82,17 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9
-                Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 280 Data size: 63891 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
                   predicate: (_col9 > 1) (type: boolean)
-                  Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                    expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14))
                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8
-                    Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
-                      Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -162,22 +162,22 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: decimal_vgby
-                  Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                    expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14))
                     outputColumnNames: cint, cdecimal1, cdecimal2
-                    Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                      aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), avg(cdecimal1), stddev_pop(cdecimal1), stddev_samp(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), avg(cdecimal2), stddev_pop(cdecimal2), stddev_samp(cdecimal2), count()
                       keys: cint (type: int)
                       mode: hash
                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
-                      Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: struct), _col6 (type: struct), _col7 (type: struct), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: struct), _col13 (type: struct), _col14 (type: struct), _col15 (type: bigint)
             Execution mode: vectorized
         Reducer 2
@@ -187,17 +187,17 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
-                Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 280 Data size: 63891 Basic stats: COMPLETE Column stats: NONE
                 Filter Operator
                   predicate: (_col15 > 1) (type: boolean)
-                  Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                    expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: decimal(24,14)), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: decimal(27,18)), _col13 (type: double), _col14 (type: double)
                    outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14
-                    Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
-                      Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE
                       table:
                           input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out
index e5e5b4b..20b357c 100644
--- a/ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_decimal_expressions.q.out
@@ -39,18 +39,18 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: decimal_test
-                  Statistics: Num rows: 12288 Data size: 2128368 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 444 Data size: 103076 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                    predicate: ((cdecimal1 > 0) and (UDFToDouble(cdecimal1) < 12345.5678) and (cdecimal2 <> 0) and (cdecimal2 > 1000) and cdouble is not null) (type: boolean)
-                    Statistics: Num rows: 455 Data size: 78809 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 16 Data size: 3714 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                      expressions: (cdecimal1 + cdecimal2) (type: decimal(25,14)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(26,14)), ((UDFToDouble(cdecimal1) + 2.34) / UDFToDouble(cdecimal2)) (type: double), (UDFToDouble(cdecimal1) * (UDFToDouble(cdecimal2) / 3.4)) (type: double), (cdecimal1 % 10) (type: decimal(12,10)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), UDFToString(cdecimal2) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp)
                      outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
-                      Statistics: Num rows: 455 Data size: 78809 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 16 Data size: 3714 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                        key expressions: _col0 (type: decimal(25,14)), _col1 (type: decimal(26,14)), _col2 (type: double), _col3 (type: double), _col4 (type: decimal(12,10)), _col5 (type: int), _col6 (type: smallint), _col7 (type: tinyint), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: double), _col11 (type: float), _col12 (type: string), _col13 (type: timestamp)
                         sort order: ++++++++++++++
-                        Statistics: Num rows: 455 Data size: 78809 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 16 Data size: 3714 Basic stats: COMPLETE Column stats: NONE
                         TopN Hash Memory Usage: 0.1
             Execution mode: vectorized
         Reducer 2
@@ -59,13 +59,13 @@ STAGE PLANS:
               Select Operator
                expressions: KEY.reducesinkkey0 (type: decimal(25,14)), KEY.reducesinkkey1 (type: decimal(26,14)), KEY.reducesinkkey2 (type: double), KEY.reducesinkkey3 (type: double), KEY.reducesinkkey4 (type: decimal(12,10)), KEY.reducesinkkey5 (type: int), KEY.reducesinkkey6 (type: smallint), KEY.reducesinkkey7 (type: tinyint), KEY.reducesinkkey8 (type: bigint), KEY.reducesinkkey9 (type: boolean), KEY.reducesinkkey10 (type: double), KEY.reducesinkkey11 (type: float), KEY.reducesinkkey12 (type: string), KEY.reducesinkkey13 (type: timestamp)
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
-                Statistics: Num rows: 455 Data size: 78809 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 16 Data size: 3714 Basic stats: COMPLETE Column stats: NONE
                 Limit
                   Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 10 Data size: 2320 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 10 Data size: 1730 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 10 Data size: 2320 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/vector_grouping_sets.q.out b/ql/src/test/results/clientpositive/tez/vector_grouping_sets.q.out
index 86c7306..244a117 100644
--- a/ql/src/test/results/clientpositive/tez/vector_grouping_sets.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_grouping_sets.q.out
@@ -153,32 +153,32 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: store
-                  Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: s_store_id (type: string)
                     outputColumnNames: s_store_id
-                    Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       keys: s_store_id (type: string), '0' (type: string)
                       mode: hash
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 24 Data size: 51264 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 88 Data size: 8976 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col1 (type: string)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                        Statistics: Num rows: 24 Data size: 51264 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 88 Data size: 8976 Basic stats: COMPLETE Column stats: NONE
         Reducer 2
             Reduce Operator Tree:
               Group By Operator
                 keys: KEY._col0 (type: string), KEY._col1 (type: string)
                 mode: mergepartial
                 outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                 pruneGroupingSetId: true
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -235,21 +235,21 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: store
-                  Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: s_store_id (type: string)
                     outputColumnNames: s_store_id
-                    Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       keys: s_store_id (type: string), '0' (type: string)
                       mode: hash
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 24 Data size: 51264 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 88 Data size: 8976 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col1 (type: string)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: string), _col1 (type: string)
-                        Statistics: Num rows: 24 Data size: 51264 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 88 Data size: 8976 Basic stats: COMPLETE Column stats: NONE
         Reducer 2
             Execution mode: vectorized
             Reduce Operator Tree:
@@ -257,14 +257,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: string), KEY._col1 (type: string)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                 Select Operator
                   expressions: _col0 (type: string), _col1 (type: string)
                   outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                   File Output Operator
                     compressed: false
-                    Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE
                     table:
                         input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/vector_interval_mapjoin.q.out b/ql/src/test/results/clientpositive/tez/vector_interval_mapjoin.q.out
index 4775167..d1aa45a 100644
--- a/ql/src/test/results/clientpositive/tez/vector_interval_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_interval_mapjoin.q.out
@@ -196,14 +196,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: vectortab_a_1korc
-                  Statistics: Num rows: 1000 Data size: 460264 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 258 Data size: 50720 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                    predicate: (s is not null and (dt - CAST( ts AS DATE)) is not null) (type: boolean)
-                    Statistics: Num rows: 1000 Data size: 460264 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 258 Data size: 50720 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                      expressions: s (type: string), (dt - CAST( ts AS DATE)) (type: interval_day_time)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 1000 Data size: 460264 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 258 Data size: 50720 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
@@ -213,15 +213,15 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1, _col2
                         input vertices:
                           1 Map 2
-                        Statistics: Num rows: 1100 Data size: 506290 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 283 Data size: 55792 Basic stats: COMPLETE Column stats: NONE
                         HybridGraceHashJoin: true
                         Select Operator
                          expressions: _col0 (type: string), _col2 (type: string), _col1 (type: interval_day_time)
                           outputColumnNames: _col0, _col1, _col2
-                          Statistics: Num rows: 1100 Data size: 506290 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 283 Data size: 55792 Basic stats: COMPLETE Column stats: NONE
                           File Output Operator
                             compressed: false
-                            Statistics: Num rows: 1100 Data size: 506290 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 283 Data size: 55792 Basic stats: COMPLETE Column stats: NONE
                             table:
                                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -231,19 +231,19 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: vectortab_b_1korc
-                  Statistics: Num rows: 1000 Data size: 458448 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 257 Data size: 50442 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                    predicate: (s is not null and (dt - CAST( ts AS DATE)) is not null) (type: boolean)
-                    Statistics: Num rows: 1000 Data size: 458448 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 257 Data size: 50442 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                      expressions: s (type: string), (dt - CAST( ts AS DATE)) (type: interval_day_time)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 1000 Data size: 458448 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 257 Data size: 50442 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                        key expressions: _col1 (type: interval_day_time), _col0 (type: string)
                         sort order: ++
                        Map-reduce partition columns: _col1 (type: interval_day_time), _col0 (type: string)
-                        Statistics: Num rows: 1000 Data size: 458448 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 257 Data size: 50442 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
  Stage: Stage-0
diff --git a/ql/src/test/results/clientpositive/tez/vector_join30.q.out b/ql/src/test/results/clientpositive/tez/vector_join30.q.out
index dd5b5aa..9df319b 100644
--- a/ql/src/test/results/clientpositive/tez/vector_join30.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_join30.q.out
@@ -48,35 +48,35 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
-                        Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 4
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col1 (type: string)
                         sort order: +
-                        Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -84,7 +84,7 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
@@ -94,7 +94,7 @@ STAGE PLANS:
                   outputColumnNames: _col2, _col3
                   input vertices:
                     1 Reducer 5
-                  Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE
                   HybridGraceHashJoin: true
                   Group By Operator
                     aggregations: sum(hash(_col2,_col3))
@@ -126,12 +126,12 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string)
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
  Stage: Stage-0
@@ -193,29 +193,29 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 4
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -223,7 +223,7 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Left Outer Join0 to 1
@@ -233,7 +233,7 @@ STAGE PLANS:
                   outputColumnNames: _col2, _col3
                   input vertices:
                     1 Reducer 5
-                  Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE
                   HybridGraceHashJoin: true
                   Group By Operator
                     aggregations: sum(hash(_col2,_col3))
@@ -265,12 +265,12 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string)
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
  Stage: Stage-0
@@ -332,29 +332,29 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 3
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -363,18 +363,18 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string)
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
         Reducer 4
             Reduce Operator Tree:
              Select Operator
                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Map Join Operator
                 condition map:
                      Right Outer Join0 to 1
@@ -384,7 +384,7 @@ STAGE PLANS:
                 outputColumnNames: _col2, _col3
                 input vertices:
                   0 Reducer 2
-                Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE
                 HybridGraceHashJoin: true
                 Group By Operator
                   aggregations: sum(hash(_col2,_col3))
@@ -477,53 +477,53 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
-                        Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 4
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col1 (type: string)
                        sort order: +
-                        Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col0 (type: string)
             Execution mode: vectorized
         Map 6
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: key (type: string), value (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col1 (type: string)
                         sort order: +
-                        Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -531,7 +531,7 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Map Join Operator
                   condition map:
                        Inner Join 0 to 1
@@ -544,7 +544,7 @@ STAGE PLANS:
                   input vertices:
                     1 Reducer 5
                     2 Reducer 7
-                  Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
                   HybridGraceHashJoin: true
                   Group By Operator
                     aggregations: sum(hash(_col2,_col3))
@@ -576,12 +576,12 @@ STAGE PLANS:
               Select Operator
                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0, _col1
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string)
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   value expressions: _col1 (type: string)
         Reducer 7
             Execution mode: vectorized
@@ -589,12 +589,12 @@ STAGE PLANS:
               Select Operator
                 expressions: VALUE._col0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string)
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
  Stage: Stage-0
    Fetch Operator
@@ -669,44 +669,44 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 5
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Map 7
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -715,12 +715,12 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: string)
                 outputColumnNames: _col0
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: string)
                   sort order: +
                   Map-reduce partition columns: _col0 (type: string)
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Merge Join Operator
@@ -732,7 +732,7 @@ STAGE PLANS:
                  1 _col0 (type: string)
                  2 _col0 (type: string)
                outputColumnNames: _col2, _col3
-               Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(hash(_col2,_col3))
                 mode: hash
@@ -763,12 +763,12 @@ STAGE PLANS:
              Select Operator
                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col1 (type: string)
        Reducer 8
            Execution mode: vectorized
@@ -776,12 +776,12 @@ STAGE PLANS:
              Select Operator
                expressions: VALUE._col0 (type: string)
                outputColumnNames: _col0
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
  Stage: Stage-0
    Fetch Operator
@@ -856,44 +856,44 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 5
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Map 7
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -902,12 +902,12 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Merge Join Operator
@@ -919,7 +919,7 @@ STAGE PLANS:
                  1 _col0 (type: string)
                  2 _col0 (type: string)
                outputColumnNames: _col2, _col3
-               Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(hash(_col2,_col3))
                 mode: hash
@@ -950,12 +950,12 @@ STAGE PLANS:
              Select Operator
                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col1 (type: string)
        Reducer 8
            Execution mode: vectorized
@@ -963,12 +963,12 @@ STAGE PLANS:
              Select Operator
                expressions: VALUE._col0 (type: string)
                outputColumnNames: _col0
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
  Stage: Stage-0
    Fetch Operator
@@ -1043,44 +1043,44 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 5
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Map 7
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -1089,12 +1089,12 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Merge Join Operator
@@ -1106,7 +1106,7 @@ STAGE PLANS:
                  1 _col0 (type: string)
                  2 _col0 (type: string)
                outputColumnNames: _col2, _col3
-               Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(hash(_col2,_col3))
                 mode: hash
@@ -1137,12 +1137,12 @@ STAGE PLANS:
              Select Operator
                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col1 (type: string)
        Reducer 8
            Execution mode: vectorized
@@ -1150,12 +1150,12 @@ STAGE PLANS:
              Select Operator
                expressions: VALUE._col0 (type: string)
                outputColumnNames: _col0
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
  Stage: Stage-0
    Fetch Operator
@@ -1230,44 +1230,44 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string)
                     outputColumnNames: _col0
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col0 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 5
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Map 7
             Map Operator Tree:
                 TableScan
                   alias: orcsrc
-                  Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: key (type: string), value (type: string)
                     outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                     Reduce Output Operator
                       key expressions: _col1 (type: string)
                       sort order: +
-                      Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                       value expressions: _col0 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -1276,12 +1276,12 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Reduce Operator Tree:
              Merge Join Operator
@@ -1293,7 +1293,7 @@ STAGE PLANS:
                  1 _col0 (type: string)
                  2 _col0 (type: string)
                outputColumnNames: _col2, _col3
-               Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: sum(hash(_col2,_col3))
                 mode: hash
@@ -1324,12 +1324,12 @@ STAGE PLANS:
              Select Operator
                expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col1 (type: string)
        Reducer 8
            Execution mode: vectorized
@@ -1337,12 +1337,12 @@ STAGE PLANS:
              Select Operator
                expressions: VALUE._col0 (type: string)
                outputColumnNames: _col0
-               Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: string)
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
  Stage: Stage-0
    Fetch Operator
diff --git a/ql/src/test/results/clientpositive/tez/vector_leftsemi_mapjoin.q.out b/ql/src/test/results/clientpositive/tez/vector_leftsemi_mapjoin.q.out
index 92ad7b9..0945d32 100644
--- a/ql/src/test/results/clientpositive/tez/vector_leftsemi_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/tez/vector_leftsemi_mapjoin.q.out
@@ -153,10 +153,10 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
                            Left Semi Join 0 to 1
@@ -166,42 +166,42 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1
                       input vertices:
                         1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -250,10 +250,10 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
                            Left Semi Join 0 to 1
@@ -263,42 +263,42 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1
                       input vertices:
                         1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Reducer 2
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -349,10 +349,10 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
                            Left Semi Join 0 to 1
@@ -362,11 +362,11 @@ STAGE PLANS:
                       outputColumnNames: _col0, _col1
                       input vertices:
                         1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
        Map 3
            Map Operator Tree:
                TableScan
@@ -394,10 +394,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -440,10 +440,10 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                     Map Join Operator
                       condition map:
                            Left Semi Join 0 to 1
@@ -453,15 +453,15 @@
outputColumnNames: _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan @@ -489,10 +489,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -546,10 +546,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -559,42 +559,42 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((value < 'val_10') and key is not null) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 
279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -662,10 +662,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -675,24 +675,24 @@ STAGE PLANS: outputColumnNames: _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -738,32 +738,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num 
rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Map 2 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -773,24 +773,24 @@ STAGE PLANS: outputColumnNames: _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -833,32 +833,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 2) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Map 2 Map Operator Tree: TableScan alias: a - 
Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -868,20 +868,20 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -942,42 +942,42 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 3 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1039,10 +1039,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -1052,42 +1052,42 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (2 * key) is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: (2 * _col0) (type: int) sort order: + Map-reduce partition columns: (2 * _col0) (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1126,18 +1126,32 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) - Reducer 2 <- Map 1 (SIMPLE_EDGE) + Map 2 <- Map 1 
(BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: string) + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -1148,7 +1162,7 @@ STAGE PLANS: 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col5, _col6 input vertices: - 1 Map 3 + 0 Map 1 2 Map 4 Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -1160,20 +1174,6 @@ STAGE PLANS: sort order: ++ Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: int), _col3 (type: string) - Map 3 - Map Operator Tree: - TableScan - alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - value expressions: value (type: string) Map 4 Map Operator Tree: TableScan @@ -1196,7 +1196,7 @@ STAGE PLANS: sort order: + Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE - Reducer 2 + Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string) @@ -1285,24 +1285,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output 
Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: int), _col1 (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator @@ -1384,64 +1384,64 @@ STAGE PLANS: input vertices: 1 Map 3 2 Map 4 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output 
Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1513,49 +1513,49 @@ STAGE PLANS: input vertices: 1 Map 3 2 Map 4 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1626,12 +1626,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan @@ -1646,21 +1646,21 @@ STAGE PLANS: Map Operator Tree: TableScan 
alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -1672,20 +1672,20 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1769,31 +1769,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -1805,20 +1805,20 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1902,31 +1902,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -1938,20 +1938,20 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: 
NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2037,31 +2037,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Merge Join Operator @@ -2073,20 +2073,20 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2186,7 +2186,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -2196,52 +2196,52 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 4 - Statistics: 
Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2337,10 +2337,10 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 2 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2349,24 +2349,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: 
COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Stage: Stage-0 Fetch Operator @@ -2405,10 +2405,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -2418,43 +2418,43 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2503,10 +2503,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -2516,43 +2516,43 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reducer 2 Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2603,10 +2603,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE 
Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -2616,12 +2616,12 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
@@ -2649,10 +2649,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2695,10 +2695,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -2708,16 +2708,16 @@ STAGE PLANS:
outputColumnNames: _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Select Operator
expressions: _col1 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
@@ -2745,10 +2745,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2802,10 +2802,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -2815,43 +2815,43 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((value < 'val_10') and key is not null) (type: boolean)
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2919,10 +2919,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -2932,25 +2932,25 @@ STAGE PLANS:
outputColumnNames: _col1
input vertices:
1 Map 1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Select Operator
expressions: _col1 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2996,32 +2996,32 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: t2
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
- Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Map 2
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -3031,25 +3031,25 @@ STAGE PLANS:
outputColumnNames: _col1
input vertices:
1 Map 1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Select Operator
expressions: _col1 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3092,32 +3092,32 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: t1
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key > 2) (type: boolean)
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Map 2
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -3127,21 +3127,21 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3202,43 +3202,43 @@ STAGE PLANS:
outputColumnNames: _col0
input vertices:
1 Map 3
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3300,10 +3300,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -3313,43 +3313,43 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (2 * key) is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: (2 * _col0) (type: int)
sort order: +
Map-reduce partition columns: (2 * _col0) (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3388,18 +3388,32 @@ STAGE PLANS:
Tez
#### A masked pattern was here ####
Edges:
- Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
- Reducer 2 <- Map 1 (SIMPLE_EDGE)
+ Map 2 <- Map 1 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+ Reducer 3 <- Map 2 (SIMPLE_EDGE)
#### A masked pattern was here ####
Vertices:
Map 1
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+ Reduce Output Operator
+ key expressions: key (type: int)
+ sort order: +
+ Map-reduce partition columns: key (type: int)
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+ value expressions: value (type: string)
+ Map 2
+ Map Operator Tree:
+ TableScan
+ alias: b
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
+ Filter Operator
+ predicate: key is not null (type: boolean)
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Inner Join 0 to 1
@@ -3410,7 +3424,7 @@ STAGE PLANS:
2 _col0 (type: int)
outputColumnNames: _col0, _col1, _col5, _col6
input vertices:
- 1 Map 3
+ 0 Map 1
2 Map 4
Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
@@ -3423,20 +3437,6 @@ STAGE PLANS:
sort order: ++
Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
value expressions: _col2 (type: int), _col3 (type: string)
- Map 3
- Map Operator Tree:
- TableScan
- alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
- Filter Operator
- predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
- Reduce Output Operator
- key expressions: key (type: int)
- sort order: +
- Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
- value expressions: value (type: string)
Map 4
Map Operator Tree:
TableScan
@@ -3459,7 +3459,7 @@ STAGE PLANS:
sort order: +
Map-reduce partition columns: _col0 (type: int)
Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
- Reducer 2
+ Reducer 3
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string), VALUE._col0 (type: int), VALUE._col1 (type: string)
@@ -3549,24 +3549,24 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key is not null and value is not null) (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
Map-reduce partition columns: _col0 (type: int), _col1 (type: string)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Select Operator
@@ -3648,65 +3648,65 @@ STAGE PLANS:
input vertices:
1 Map 3
2 Map 4
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map 4
Map Operator Tree:
TableScan
alias: c
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3778,50 +3778,50 @@ STAGE PLANS:
input vertices:
1 Map 3
2 Map 4
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: int)
sort order: +
Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map 4
Map Operator Tree:
TableScan
alias: c
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3892,12 +3892,12 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: int)
sort order: +
Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map 4
Map Operator Tree:
TableScan
@@ -3912,21 +3912,21 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: c
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Merge Join Operator
@@ -3938,20 +3938,20 @@ STAGE PLANS:
1 key (type: int)
2 _col0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4035,31 +4035,31 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Map 5
Map Operator Tree:
TableScan
alias: c
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: int)
sort order: +
Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Merge Join Operator
@@ -4071,20 +4071,20 @@ STAGE PLANS:
1 _col0 (type: int)
2 key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4168,31 +4168,31 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Map 5
Map Operator Tree:
TableScan
alias: c
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: int)
sort order: +
Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Merge Join Operator
@@ -4204,20 +4204,20 @@ STAGE PLANS:
1 _col0 (type: int)
2 key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4303,31 +4303,31 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map 5
Map Operator Tree:
TableScan
alias: c
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: key (type: int)
sort order: +
Map-reduce partition columns: key (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Merge Join Operator
@@ -4339,20 +4339,20 @@ STAGE PLANS:
1 _col0 (type: int)
2 key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
Reducer 3
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4452,7 +4452,7 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Map Join Operator
condition map:
@@ -4463,53 +4463,53 @@ STAGE PLANS:
outputColumnNames: _col0
input vertices:
1 Map 4
- Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Map 4
Map Operator Tree:
TableScan
alias: c
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: value (type: string)
sort order: +
Map-reduce partition columns: value (type: string)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reducer 2
Reduce Operator Tree:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4605,11 +4605,11 @@ STAGE PLANS:
outputColumnNames: _col0
input vertices:
1 Map 2
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
HybridGraceHashJoin: true
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4618,24 +4618,24 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: value is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: value (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: string)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
Map-reduce partition columns: _col0 (type: string)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Stage: Stage-0
Fetch Operator
@@ -4674,10 +4674,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -4687,34 +4687,34 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reducer 2
Execution mode: vectorized
@@ -4722,10 +4722,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4774,10 +4774,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -4787,34 +4787,34 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reducer 2
Execution mode: vectorized
@@ -4822,10 +4822,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4876,10 +4876,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -4889,11 +4889,11 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 3
Map Operator Tree:
@@ -4924,10 +4924,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4970,10 +4970,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -4983,15 +4983,15 @@ STAGE PLANS:
outputColumnNames: _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col1 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 3
Map Operator Tree:
@@ -5022,10 +5022,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5079,10 +5079,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -5092,34 +5092,34 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((value < 'val_10') and key is not null) (type: boolean)
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reducer 2
Execution mode: vectorized
@@ -5127,10 +5127,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5199,10 +5199,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -5212,15 +5212,15 @@ STAGE PLANS:
outputColumnNames: _col1
input vertices:
1 Map 1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col1 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reducer 3
Execution mode: vectorized
@@ -5228,10 +5228,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5277,33 +5277,33 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: t2
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
- Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int), value (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int), _col1 (type: string)
mode: hash
outputColumnNames: _col0, _col1
- Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 2
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -5313,15 +5313,15 @@ STAGE PLANS:
outputColumnNames: _col1
input vertices:
1 Map 1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: _col1 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: string)
sort order: +
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reducer 3
Execution mode: vectorized
@@ -5329,10 +5329,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: string)
outputColumnNames: _col0
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5375,33 +5375,33 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: t1
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (key > 2) (type: boolean)
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 2
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -5411,11 +5411,11 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reducer 3
Execution mode: vectorized
@@ -5423,10 +5423,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
outputColumnNames: _col0, _col1
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5487,34 +5487,34 @@ STAGE PLANS:
outputColumnNames: _col0
input vertices:
1 Map 3
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Select Operator
expressions: key (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Group By Operator
keys: _col0 (type: int)
mode: hash
outputColumnNames: _col0
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int)
sort order: +
Map-reduce partition columns: _col0 (type: int)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Reducer 2
Execution mode: vectorized
@@ -5522,10 +5522,10 @@ STAGE PLANS:
Select Operator
expressions: KEY.reducesinkkey0 (type: int)
outputColumnNames: _col0
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
File Output Operator
compressed: false
- Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
table:
input format: org.apache.hadoop.mapred.SequenceFileInputFormat
output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5587,10 +5587,10 @@ STAGE PLANS:
Map Operator Tree:
TableScan
alias: a
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: key is not null (type: boolean)
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
Map Join Operator
condition map:
Left Semi Join 0 to 1
@@ -5600,34 +5600,34 @@ STAGE PLANS:
outputColumnNames: _col0, _col1
input vertices:
1 Map 3
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
Reduce Output Operator
key expressions: _col0 (type: int), _col1 (type: string)
sort order: ++
- Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
Execution mode: vectorized
Map 3
Map Operator Tree:
TableScan
alias: b
- Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+ Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
Filter Operator
predicate: (2 * key) is not
null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: (2 * _col0) (type: int) sort order: + Map-reduce partition columns: (2 * _col0) (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -5635,10 +5635,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -5677,18 +5677,33 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) - Reducer 2 <- Map 1 (SIMPLE_EDGE) + Map 2 <- Map 1 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: string) + Execution mode: vectorized + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -5699,7 +5714,7 @@ STAGE PLANS: 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col5, _col6 input vertices: - 1 Map 3 + 0 Map 1 2 Map 4 Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE Select Operator @@ -5712,21 +5727,6 @@ STAGE PLANS: Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: int), _col3 (type: string) Execution mode: vectorized - Map 3 - Map Operator Tree: - 
TableScan - alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - value expressions: value (type: string) - Execution mode: vectorized Map 4 Map Operator Tree: TableScan @@ -5750,7 +5750,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized - Reducer 2 + Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator @@ -5841,24 +5841,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: int), _col1 (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -5942,57 +5942,57 @@ STAGE PLANS: input vertices: 1 Map 3 2 Map 4 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 
Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -6000,10 +6000,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6075,42 +6075,42 @@ STAGE PLANS: input vertices: 1 Map 3 2 Map 4 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 
348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -6118,10 +6118,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6192,12 +6192,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: @@ -6214,21 +6214,21 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -6241,21 +6241,21 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6340,32 +6340,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -6378,21 +6378,21 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6477,32 
+6477,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -6515,21 +6515,21 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6616,32 +6616,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + 
Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -6654,21 +6654,21 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6768,7 +6768,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -6778,45 +6778,45 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 4 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - 
Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -6824,10 +6824,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6923,10 +6923,10 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 2 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6936,24 +6936,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: 
vectorized Stage: Stage-0 @@ -6993,10 +6993,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7006,35 +7006,35 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -7042,10 +7042,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7094,10 +7094,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join 
Operator condition map: Left Semi Join 0 to 1 @@ -7107,35 +7107,35 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -7143,10 +7143,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7197,10 +7197,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7210,12 +7210,12 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: 
NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: @@ -7246,10 +7246,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7292,10 +7292,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7305,16 +7305,16 @@ STAGE PLANS: outputColumnNames: _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: @@ -7345,10 +7345,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7402,10 +7402,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7415,35 +7415,35 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE 
Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((value < 'val_10') and key is not null) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -7451,10 +7451,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7523,10 +7523,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7536,16 +7536,16 @@ STAGE PLANS: outputColumnNames: _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 3 Execution mode: vectorized @@ -7553,10 +7553,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7602,33 +7602,33 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 2 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7638,16 +7638,16 @@ STAGE PLANS: outputColumnNames: _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 3 Execution mode: vectorized @@ -7655,10 +7655,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - 
Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7701,33 +7701,33 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 2) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 2 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7737,12 +7737,12 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 3 Execution mode: vectorized @@ -7750,10 +7750,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ 
-7814,35 +7814,35 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 3 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -7850,10 +7850,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7915,10 +7915,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7928,35 +7928,35 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 
Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (2 * key) is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: (2 * _col0) (type: int) sort order: + Map-reduce partition columns: (2 * _col0) (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -7964,10 +7964,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8006,18 +8006,33 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) - Reducer 2 <- Map 1 (SIMPLE_EDGE) + Map 2 <- Map 1 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: string) + Execution mode: vectorized + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -8028,7 +8043,7 @@ STAGE PLANS: 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col5, _col6 input vertices: - 1 Map 3 + 0 Map 1 2 Map 4 Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE 
HybridGraceHashJoin: true @@ -8042,21 +8057,6 @@ STAGE PLANS: Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE value expressions: _col2 (type: int), _col3 (type: string) Execution mode: vectorized - Map 3 - Map Operator Tree: - TableScan - alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - Filter Operator - predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - Reduce Output Operator - key expressions: key (type: int) - sort order: + - Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE - value expressions: value (type: string) - Execution mode: vectorized Map 4 Map Operator Tree: TableScan @@ -8080,7 +8080,7 @@ STAGE PLANS: Map-reduce partition columns: _col0 (type: int) Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized - Reducer 2 + Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator @@ -8172,24 +8172,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: int), _col1 (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -8273,58 +8273,58 @@ STAGE PLANS: input vertices: 1 Map 3 2 Map 4 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) 
outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -8332,10 +8332,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8407,43 +8407,43 @@ STAGE PLANS: input vertices: 1 Map 3 2 Map 4 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data 
size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -8451,10 +8451,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8525,12 +8525,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: @@ -8547,21 +8547,21 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -8574,21 +8574,21 @@ STAGE PLANS: 1 key (type: int) 2 
_col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8673,32 +8673,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -8711,21 +8711,21 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator 
compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8810,32 +8810,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -8848,21 +8848,21 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8949,32 +8949,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE 
Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -8987,21 +8987,21 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9101,7 +9101,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Map Join Operator condition map: @@ -9112,46 +9112,46 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 4 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data 
size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -9159,10 +9159,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9258,11 +9258,11 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 2 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9272,24 +9272,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Stage: Stage-0 @@ -9329,10 +9329,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9342,34 +9342,34 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -9377,10 +9377,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9429,10 +9429,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9442,34 +9442,34 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -9477,10 +9477,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9531,10 +9531,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9544,11 +9544,11 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 
1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: @@ -9579,10 +9579,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9625,10 +9625,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9638,15 +9638,15 @@ STAGE PLANS: outputColumnNames: _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: @@ -9677,10 +9677,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9734,10 +9734,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator 
condition map: Left Semi Join 0 to 1 @@ -9747,34 +9747,34 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((value < 'val_10') and key is not null) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -9782,10 +9782,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9854,10 +9854,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9867,15 +9867,15 @@ STAGE PLANS: outputColumnNames: _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 3 Execution mode: vectorized @@ -9883,10 +9883,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9932,33 +9932,33 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 2 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9968,15 +9968,15 @@ STAGE PLANS: outputColumnNames: _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: 
NONE Execution mode: vectorized Reducer 3 Execution mode: vectorized @@ -9984,10 +9984,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10030,33 +10030,33 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 2) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 2 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -10066,11 +10066,11 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 3 Execution mode: vectorized @@ -10078,10 +10078,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: 
COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10142,34 +10142,34 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 3 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -10177,10 +10177,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10242,10 +10242,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -10255,34 +10255,34 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (2 * key) is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: (2 * _col0) (type: int) sort order: + Map-reduce partition columns: (2 * _col0) (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -10290,10 +10290,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10332,18 +10332,33 @@ STAGE PLANS: Tez #### A masked pattern was here #### Edges: - Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) - Reducer 2 <- Map 1 (SIMPLE_EDGE) + Map 2 <- Map 1 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE) + Reducer 3 <- Map 2 (SIMPLE_EDGE) #### A masked pattern was here #### Vertices: Map 1 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Reduce Output Operator + key expressions: key (type: int) + sort order: + + Map-reduce partition columns: key (type: int) + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE + value expressions: value (type: string) + Execution mode: vectorized + Map 2 + Map Operator Tree: + TableScan + alias: b + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE + Filter Operator + predicate: key is not null (type: boolean) + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -10354,7 +10369,7 @@ STAGE PLANS: 2 _col0 (type: int) outputColumnNames: _col0, _col1, _col5, _col6 
                      input vertices:
-                        1 Map 3
+                        0 Map 1
                        2 Map 4
                      Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
@@ -10367,21 +10382,6 @@ STAGE PLANS:
                        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col2 (type: int), _col3 (type: string)
            Execution mode: vectorized
-        Map 3
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: key (type: int)
-                      sort order: +
-                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: value (type: string)
-            Execution mode: vectorized
        Map 4
            Map Operator Tree:
                TableScan
@@ -10405,7 +10405,7 @@ STAGE PLANS:
                          Map-reduce partition columns: _col0 (type: int)
                          Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
-        Reducer 2
+        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
@@ -10496,24 +10496,24 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: (key is not null and value is not null) (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int), _col1 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int), _col1 (type: string)
                          sort order: ++
                          Map-reduce partition columns: _col0 (type: int), _col1 (type: string)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -10597,57 +10597,57 @@ STAGE PLANS:
                      input vertices:
                        1 Map 3
                        2 Map 4
-                      Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
-                        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 4
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -10655,10 +10655,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -10730,42 +10730,42 @@ STAGE PLANS:
                      input vertices:
                        1 Map 3
                        2 Map 4
-                      Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
-                        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 4
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -10773,10 +10773,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -10847,12 +10847,12 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 4
            Map Operator Tree:
@@ -10869,21 +10869,21 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Reduce Operator Tree:
@@ -10896,21 +10896,21 @@ STAGE PLANS:
                  1 key (type: int)
                  2 _col0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -10995,32 +10995,32 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Reduce Operator Tree:
@@ -11033,21 +11033,21 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -11132,32 +11132,32 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Reduce Operator Tree:
@@ -11170,21 +11170,21 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -11271,32 +11271,32 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Reduce Operator Tree:
@@ -11309,21 +11309,21 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -11423,7 +11423,7 @@ STAGE PLANS:
                      outputColumnNames: _col0, _col1
                      input vertices:
                        1 Map 3
-                      Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                      Map Join Operator
                        condition map:
                             Left Outer Join0 to 1
@@ -11433,45 +11433,45 @@ STAGE PLANS:
                        outputColumnNames: _col0
                        input vertices:
                          1 Map 4
-                        Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
-                          Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 4
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: value (type: string)
                    sort order: +
                    Map-reduce partition columns: value (type: string)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -11479,10 +11479,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -11578,10 +11578,10 @@ STAGE PLANS:
                        outputColumnNames: _col0
                        input vertices:
                          1 Map 2
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                        File Output Operator
                          compressed: false
-                          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                          table:
                              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -11591,24 +11591,24 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: value is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: value (type: string)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: string)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: string)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized

  Stage: Stage-0
@@ -11648,10 +11648,10 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -11661,35 +11661,35 @@ STAGE PLANS:
                      outputColumnNames: _col0, _col1
                      input vertices:
                        1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -11697,10 +11697,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -11749,10 +11749,10 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -11762,35 +11762,35 @@ STAGE PLANS:
                      outputColumnNames: _col0, _col1
                      input vertices:
                        1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -11798,10 +11798,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -11852,10 +11852,10 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -11865,12 +11865,12 @@ STAGE PLANS:
                      outputColumnNames: _col0, _col1
                      input vertices:
                        1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
@@ -11901,10 +11901,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -11947,10 +11947,10 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -11960,16 +11960,16 @@ STAGE PLANS:
                      outputColumnNames: _col1
                      input vertices:
                        1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Select Operator
                        expressions: _col1 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          sort order: +
-                          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
@@ -12000,10 +12000,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -12057,10 +12057,10 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -12070,35 +12070,35 @@ STAGE PLANS:
                      outputColumnNames: _col0, _col1
                      input vertices:
                        1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: ((value < 'val_10') and key is not null) (type: boolean)
-                    Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int), _col1 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -12106,10 +12106,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -12178,10 +12178,10 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -12191,16 +12191,16 @@ STAGE PLANS:
                      outputColumnNames: _col1
                      input vertices:
                        1 Map 1
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Select Operator
                        expressions: _col1 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          sort order: +
-                          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 3
            Execution mode: vectorized
@@ -12208,10 +12208,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -12257,33 +12257,33 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: t2
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int), _col1 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 2
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -12293,16 +12293,16 @@ STAGE PLANS:
                      outputColumnNames: _col1
                      input vertices:
                        1 Map 1
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Select Operator
                        expressions: _col1 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: string)
                          sort order: +
-                          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 3
            Execution mode: vectorized
@@ -12310,10 +12310,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -12356,33 +12356,33 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: t1
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: (key > 2) (type: boolean)
-                    Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 2
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -12392,12 +12392,12 @@ STAGE PLANS:
                      outputColumnNames: _col0, _col1
                      input vertices:
                        1 Map 1
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 3
            Execution mode: vectorized
@@ -12405,10 +12405,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -12469,35 +12469,35 @@ STAGE PLANS:
                        outputColumnNames: _col0
                        input vertices:
                          1 Map 3
-                        Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                        HybridGraceHashJoin: true
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
-                          Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -12505,10 +12505,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -12570,10 +12570,10 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Left Semi Join 0 to 1
@@ -12583,35 +12583,35 @@ STAGE PLANS:
                      outputColumnNames: _col0, _col1
                      input vertices:
                        1 Map 3
-                      Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Reduce Output Operator
                        key expressions: _col0 (type: int), _col1 (type: string)
                        sort order: ++
-                        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: (2 * key) is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: (2 * _col0) (type: int)
                          sort order: +
                          Map-reduce partition columns: (2 * _col0) (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -12619,10 +12619,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -12661,18 +12661,33 @@ STAGE PLANS:
    Tez
#### A masked pattern was here ####
      Edges:
-        Map 1 <- Map 3 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
-        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+        Map 2 <- Map 1 (BROADCAST_EDGE), Map 4 (BROADCAST_EDGE)
+        Reducer 3 <- Map 2 (SIMPLE_EDGE)
#### A masked pattern was here ####
      Vertices:
        Map 1
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+                  Filter Operator
+                    predicate: key is not null (type: boolean)
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+                    Reduce Output Operator
+                      key expressions: key (type: int)
+                      sort order: +
+                      Map-reduce partition columns: key (type: int)
+                      Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+                      value expressions: value (type: string)
+            Execution mode: vectorized
+        Map 2
+            Map Operator Tree:
+                TableScan
+                  alias: b
+                  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Map Join Operator
                      condition map:
                           Inner Join 0 to 1
@@ -12683,7 +12698,7 @@ STAGE PLANS:
                        2 _col0 (type: int)
                      outputColumnNames: _col0, _col1, _col5, _col6
                      input vertices:
-                        1 Map 3
+                        0 Map 1
                        2 Map 4
                      Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
@@ -12697,21 +12712,6 @@ STAGE PLANS:
                        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
                        value expressions: _col2 (type: int), _col3 (type: string)
            Execution mode: vectorized
-        Map 3
-            Map Operator Tree:
-                TableScan
-                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
-                  Filter Operator
-                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
-                    Reduce Output Operator
-                      key expressions: key (type: int)
-                      sort order: +
-                      Map-reduce partition columns: key (type: int)
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
-                      value expressions: value (type: string)
-            Execution mode: vectorized
        Map 4
            Map Operator Tree:
                TableScan
@@ -12735,7 +12735,7 @@ STAGE PLANS:
                          Map-reduce partition columns: _col0 (type: int)
                          Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
-        Reducer 2
+        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
@@ -12827,24 +12827,24 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: (key is not null and value is not null) (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int), value (type: string)
                      outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int), _col1 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int), _col1 (type: string)
                          sort order: ++
                          Map-reduce partition columns: _col0 (type: int), _col1 (type: string)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -12928,58 +12928,58 @@ STAGE PLANS:
                      input vertices:
                        1 Map 3
                        2 Map 4
-                      Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
-                        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 4
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Filter Operator
                    predicate: key is not null (type: boolean)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: key (type: int)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: int)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                        Reduce Output Operator
                          key expressions: _col0 (type: int)
                          sort order: +
                          Map-reduce partition columns: _col0 (type: int)
-                          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -12987,10 +12987,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -13062,43 +13062,43 @@ STAGE PLANS:
                      input vertices:
                        1 Map 3
                        2 Map 4
-                      Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                      HybridGraceHashJoin: true
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
-                        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 3
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 4
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Execution mode: vectorized
@@ -13106,10 +13106,10 @@ STAGE PLANS:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -13180,12 +13180,12 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: a
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 4
            Map Operator Tree:
@@ -13202,21 +13202,21 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Reduce Operator Tree:
@@ -13229,21 +13229,21 @@ STAGE PLANS:
                  1 key (type: int)
                  2 _col0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -13328,32 +13328,32 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Reduce Operator Tree:
@@ -13366,21 +13366,21 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -13465,32 +13465,32 @@ STAGE PLANS:
            Map Operator Tree:
                TableScan
                  alias: b
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: key (type: int)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                    Group By Operator
                      keys: _col0 (type: int)
                      mode: hash
                      outputColumnNames: _col0
-                      Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: int)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: int)
-                        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Map 5
            Map Operator Tree:
                TableScan
                  alias: c
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: key (type: int)
                    sort order: +
                    Map-reduce partition columns: key (type: int)
-                    Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Execution mode: vectorized
        Reducer 2
            Reduce Operator Tree:
@@ -13503,21 +13503,21 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        Reducer 3
            Execution mode: vectorized
            Reduce Operator Tree:
              Select Operator
                expressions: KEY.reducesinkkey0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@
-13604,32 +13604,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 5 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Reduce Operator Tree: @@ -13642,21 +13642,21 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reducer 3 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13756,7 +13756,7 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Map Join Operator condition map: @@ -13767,46 +13767,46 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 4 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data 
size: 420 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 4 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: string) sort order: + Map-reduce partition columns: value (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -13814,10 +13814,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13913,11 +13913,11 @@ STAGE PLANS: outputColumnNames: _col0 input vertices: 1 Map 2 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13927,24 +13927,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num 
rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Stage: Stage-0 diff --git a/ql/src/test/results/clientpositive/tez/vector_nullsafe_join.q.out b/ql/src/test/results/clientpositive/tez/vector_nullsafe_join.q.out index 045b687..a49f01c 100644 --- a/ql/src/test/results/clientpositive/tez/vector_nullsafe_join.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_nullsafe_join.q.out @@ -70,7 +70,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -81,15 +81,15 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5, _col6 input vertices: 1 Map 2 - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -99,12 +99,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int) sort order: + Map-reduce partition columns: value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int) Execution mode: vectorized @@ -153,10 +153,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -169,15 +169,15 
@@ STAGE PLANS: input vertices: 1 Map 2 2 Map 3 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -187,30 +187,30 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int) sort order: + Map-reduce partition columns: value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int) Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: int) Execution mode: vectorized @@ -250,7 +250,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -264,15 +264,15 @@ STAGE PLANS: input vertices: 1 Map 2 2 Map 3 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -282,24 +282,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int) sort order: + Map-reduce partition columns: value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int) Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: int) Execution mode: vectorized @@ -366,10 +366,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -383,15 +383,15 @@ STAGE PLANS: input vertices: 1 Map 2 2 Map 3 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -401,29 +401,29 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int), key (type: int) sort order: ++ Map-reduce partition columns: value (type: int), key (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int), value (type: int) sort order: ++ Map-reduce partition columns: key (type: int), value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Stage: Stage-0 @@ -462,7 +462,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -476,15 +476,15 @@ STAGE PLANS: input vertices: 1 Map 2 2 Map 3 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -494,23 +494,23 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int), key (type: int) sort order: ++ Map-reduce partition columns: value (type: int), key (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int), value (type: int) sort order: ++ Map-reduce partition columns: key (type: int), value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Stage: Stage-0 @@ -656,7 +656,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column 
stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -667,15 +667,15 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col5, _col6 input vertices: 1 Map 2 - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -685,12 +685,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int) sort order: + Map-reduce partition columns: value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int) Execution mode: vectorized @@ -739,10 +739,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -755,15 +755,15 @@ STAGE PLANS: input vertices: 1 Map 2 2 Map 3 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -773,30 +773,30 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce 
Output Operator key expressions: value (type: int) sort order: + Map-reduce partition columns: value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int) Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: int) Execution mode: vectorized @@ -836,7 +836,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -850,15 +850,15 @@ STAGE PLANS: input vertices: 1 Map 2 2 Map 3 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -868,24 +868,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int) sort order: + Map-reduce partition columns: value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: key (type: int) Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE value expressions: value (type: int) Execution mode: vectorized @@ -952,10 +952,10 @@ STAGE PLANS: Map 
Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -969,15 +969,15 @@ STAGE PLANS: input vertices: 1 Map 2 2 Map 3 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -987,29 +987,29 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int), key (type: int) sort order: ++ Map-reduce partition columns: value (type: int), key (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int), value (type: int) sort order: ++ Map-reduce partition columns: key (type: int), value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Stage: Stage-0 @@ -1048,7 +1048,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -1062,15 +1062,15 @@ STAGE PLANS: input vertices: 1 Map 2 2 Map 3 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE 
Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1080,23 +1080,23 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: value (type: int), key (type: int) sort order: ++ Map-reduce partition columns: value (type: int), key (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int), value (type: int) sort order: ++ Map-reduce partition columns: key (type: int), value (type: int) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Stage: Stage-0 diff --git a/ql/src/test/results/clientpositive/tez/vector_outer_join6.q.out b/ql/src/test/results/clientpositive/tez/vector_outer_join6.q.out index bdcdc42..6f77541 100644 --- a/ql/src/test/results/clientpositive/tez/vector_outer_join6.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_outer_join6.q.out @@ -150,11 +150,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: tjoin1 - Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 339 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: rnum (type: int), c1 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 339 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -164,12 +164,12 @@ STAGE PLANS: outputColumnNames: _col0, _col2, _col3 input vertices: 1 Map 2 - Statistics: Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 50 Data size: 407 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col2 (type: int), _col3 (type: int) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 50 Data size: 407 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -179,15 +179,15 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col3 input vertices: 1 Map 3 - Statistics: Num rows: 4 Data size: 449 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 447 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col1 (type: int), _col3 (type: int) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 447 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 55 Data size: 447 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -197,32 +197,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: tjoin2 - Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: rnum (type: int), c1 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) - Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: tjoin3 - Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 342 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: rnum (type: int), c1 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 342 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) - Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 342 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized @@ -274,11 +274,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: tjoin1 - Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 339 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: rnum (type: int), c1 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 42 Data size: 339 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -288,12 +288,12 @@ STAGE PLANS: outputColumnNames: _col0, _col2, _col3 input vertices: 1 Map 2 - Statistics: Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 50 Data size: 407 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Select Operator expressions: _col0 (type: int), _col2 (type: int), _col3 (type: int) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 50 Data size: 407 
Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -303,11 +303,11 @@ STAGE PLANS: outputColumnNames: _col0, _col1 input vertices: 1 Map 3 - Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 376 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true File Output Operator compressed: false - Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 376 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -317,32 +317,32 @@ STAGE PLANS: Map Operator Tree: TableScan alias: tjoin2 - Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: rnum (type: int), c1 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: int) sort order: + Map-reduce partition columns: _col1 (type: int) - Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: tjoin3 - Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 85 Data size: 342 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 85 Data size: 342 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 85 Data size: 342 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Stage: Stage-0 diff --git a/ql/src/test/results/clientpositive/tez/vector_partitioned_date_time.q.out b/ql/src/test/results/clientpositive/tez/vector_partitioned_date_time.q.out index 13e70dd..d501053 100644 --- a/ql/src/test/results/clientpositive/tez/vector_partitioned_date_time.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_partitioned_date_time.q.out @@ -279,15 +279,15 @@ STAGE PLANS: Map Operator Tree: TableScan alias: flights_tiny_orc - Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: origin_city_name (type: string), dest_city_name (type: string), fl_date (type: date), fl_time (type: timestamp), arr_delay (type: float), fl_num (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col5 (type: int), _col2 (type: date) sort order: ++ - Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE 
+ Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: string), _col1 (type: string), _col3 (type: timestamp), _col4 (type: float) Execution mode: vectorized @@ -297,14 +297,14 @@ STAGE PLANS: Select Operator expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), KEY.reducesinkkey1 (type: date), VALUE._col2 (type: timestamp), VALUE._col3 (type: float), KEY.reducesinkkey0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 25 - Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col5 (type: int), _col2 (type: date) sort order: ++ - Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: string), _col1 (type: string), _col3 (type: timestamp), _col4 (type: float) Reducer 3 @@ -313,13 +313,13 @@ STAGE PLANS: Select Operator expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), KEY.reducesinkkey1 (type: date), VALUE._col2 (type: timestamp), VALUE._col3 (type: float), KEY.reducesinkkey0 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 25 - Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -386,22 +386,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: flights_tiny_orc - Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 1457 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: fl_date (type: date) outputColumnNames: fl_date - Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 1457 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count() keys: fl_date (type: date) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 1457 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: date) sort order: + Map-reduce partition columns: _col0 (type: date) - Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 26 Data size: 1457 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Execution mode: vectorized Reducer 2 @@ -412,10 +412,10 @@ STAGE PLANS: keys: KEY._col0 (type: date) mode: 
mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 68 Data size: 19584 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 13 Data size: 728 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 68 Data size: 19584 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 13 Data size: 728 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/tez/vector_reduce_groupby_decimal.q.out b/ql/src/test/results/clientpositive/tez/vector_reduce_groupby_decimal.q.out index 8c58232..7bfc1fb 100644 --- a/ql/src/test/results/clientpositive/tez/vector_reduce_groupby_decimal.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_reduce_groupby_decimal.q.out @@ -45,21 +45,21 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_test - Statistics: Num rows: 6102 Data size: 1440072 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 477 Data size: 112623 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (cdecimal1 is not null and cdecimal2 is not null) (type: boolean) - Statistics: Num rows: 6102 Data size: 1440072 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 477 Data size: 112623 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: min(cdecimal1) keys: cint (type: int), cdouble (type: double), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14)) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 6102 Data size: 1440072 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 477 Data size: 112623 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: double), _col2 (type: decimal(20,10)), _col3 (type: decimal(23,14)) sort order: ++++ Map-reduce partition columns: _col0 (type: int), _col1 (type: double), _col2 (type: decimal(20,10)), _col3 (type: decimal(23,14)) - Statistics: Num rows: 6102 Data size: 1440072 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 477 Data size: 112623 Basic stats: COMPLETE Column stats: NONE value expressions: _col4 (type: decimal(20,10)) Execution mode: vectorized Reducer 2 @@ -70,11 +70,11 @@ STAGE PLANS: keys: KEY._col0 (type: int), KEY._col1 (type: double), KEY._col2 (type: decimal(20,10)), KEY._col3 (type: decimal(23,14)) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 3051 Data size: 720036 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 238 Data size: 56193 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: double), _col2 (type: decimal(20,10)), _col3 (type: decimal(23,14)) sort order: ++++ - Statistics: Num rows: 3051 Data size: 720036 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 238 Data size: 56193 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col4 (type: decimal(20,10)) Reducer 3 @@ -83,7 +83,7 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: double), KEY.reducesinkkey2 (type: decimal(20,10)), KEY.reducesinkkey3 (type: decimal(23,14)), VALUE._col0 (type: decimal(20,10)) outputColumnNames: _col0, _col1, _col2, _col3, _col4 - Statistics: Num rows: 3051 Data 
size: 720036 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 238 Data size: 56193 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 50 Statistics: Num rows: 50 Data size: 11800 Basic stats: COMPLETE Column stats: NONE diff --git a/ql/src/test/results/clientpositive/tez/vector_varchar_mapjoin1.q.out b/ql/src/test/results/clientpositive/tez/vector_varchar_mapjoin1.q.out index d50d875..ecd8dc6 100644 --- a/ql/src/test/results/clientpositive/tez/vector_varchar_mapjoin1.q.out +++ b/ql/src/test/results/clientpositive/tez/vector_varchar_mapjoin1.q.out @@ -147,14 +147,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: varchar(10)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -164,31 +164,31 @@ STAGE PLANS: outputColumnNames: _col0, _col1, _col2, _col3 input vertices: 1 Map 3 - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE HybridGraceHashJoin: true Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: varchar(10)) Execution mode: vectorized Map 3 Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: varchar(10)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: varchar(10)) sort order: + Map-reduce partition columns: _col1 (type: varchar(10)) - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: int) Execution mode: vectorized Reducer 2 @@ -197,10 +197,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(10)), VALUE._col1 (type: int), VALUE._col2 (type: varchar(10)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 
300 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -246,14 +246,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: c2 is not null (type: boolean)
-                    Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c1 (type: int), c2 (type: varchar(10))
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
@@ -263,31 +263,31 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1, _col2, _col3
                         input vertices:
                           1 Map 3
-                        Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                         HybridGraceHashJoin: true
                         Reduce Output Operator
                           key expressions: _col0 (type: int)
                           sort order: +
-                          Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: varchar(20))
             Execution mode: vectorized
         Map 3
             Map Operator Tree:
                 TableScan
                   alias: b
-                  Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: c2 is not null (type: boolean)
-                    Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c1 (type: int), c2 (type: varchar(20))
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 12 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col1 (type: varchar(20))
                         sort order: +
                         Map-reduce partition columns: _col1 (type: varchar(20))
-                        Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 12 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: int)
             Execution mode: vectorized
         Reducer 2
@@ -296,10 +296,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(10)), VALUE._col1 (type: int), VALUE._col2 (type: varchar(20))
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -347,14 +347,14 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: a
-                  Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: c2 is not null (type: boolean)
-                    Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c1 (type: int), c2 (type: varchar(10))
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Inner Join 0 to 1
@@ -364,31 +364,31 @@ STAGE PLANS:
                         outputColumnNames: _col0, _col1, _col2, _col3
                         input vertices:
                           1 Map 3
-                        Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                         HybridGraceHashJoin: true
                         Reduce Output Operator
                           key expressions: _col0 (type: int)
                           sort order: +
-                          Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                           value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: string)
             Execution mode: vectorized
         Map 3
             Map Operator Tree:
                 TableScan
                   alias: b
-                  Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: c2 is not null (type: boolean)
-                    Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: c1 (type: int), c2 (type: string)
                       outputColumnNames: _col0, _col1
-                      Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col1 (type: string)
                         sort order: +
                         Map-reduce partition columns: _col1 (type: string)
-                        Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: int)
             Execution mode: vectorized
         Reducer 2
@@ -397,10 +397,10 @@ STAGE PLANS:
               Select Operator
                 expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(10)), VALUE._col1 (type: int), VALUE._col2 (type: string)
                 outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                 File Output Operator
                   compressed: false
-                  Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                   table:
                       input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                       output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
index 70f8d1b..06a1410 100644
--- a/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorization_short_regress.q.out
@@ -3074,9 +3074,9 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE
+                  Statistics: Num rows: 1 Data size: 951 Basic stats: COMPLETE Column stats: COMPLETE
                   Select Operator
-                    Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE
+                    Statistics: Num rows: 1 Data size: 951 Basic stats: COMPLETE Column stats: COMPLETE
                     Group By Operator
                       aggregations: count()
                       mode: hash
@@ -3140,11 +3140,11 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: ctinyint (type: tinyint)
                     outputColumnNames: ctinyint
-                    Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(ctinyint)
                       mode: hash
@@ -3208,11 +3208,11 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: cint (type: int)
                     outputColumnNames: cint
-                    Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cint)
                       mode: hash
@@ -3276,11 +3276,11 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: cfloat (type: float)
                     outputColumnNames: cfloat
-                    Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cfloat)
                       mode: hash
@@ -3344,11 +3344,11 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: cstring1 (type: string)
                     outputColumnNames: cstring1
-                    Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cstring1)
                       mode: hash
@@ -3412,11 +3412,11 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: alltypesnullorc
-                  Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                   Select Operator
                     expressions: cboolean1 (type: boolean)
                     outputColumnNames: cboolean1
-                    Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cboolean1)
                       mode: hash
diff --git a/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out b/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out
index a790b97..9641877 100644
--- a/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorized_dynamic_partition_pruning.q.out
@@ -235,33 +235,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -360,19 +360,19 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
             Reduce Operator Tree:
@@ -488,33 +488,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Map 6
@@ -522,33 +522,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: hr
-                            Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -670,38 +670,38 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 6
             Map Operator Tree:
                 TableScan
                   alias: srcpart_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
             Reduce Operator Tree:
@@ -830,48 +830,48 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date_hour
                   filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
-                  Statistics: Num rows: 4 Data size: 1440 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string), hr (type: string)
                       outputColumnNames: _col0, _col2
-                      Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col2 (type: string)
                         sort order: ++
                        Map-reduce partition columns: _col0 (type: string), _col2 (type: string)
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
                      Select Operator
                        expressions: _col2 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: hr
-                            Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -970,19 +970,19 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date_hour
                   filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
-                  Statistics: Num rows: 4 Data size: 1440 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string), hr (type: string)
                       outputColumnNames: _col0, _col2
-                      Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col2 (type: string)
                         sort order: ++
                        Map-reduce partition columns: _col0 (type: string), _col2 (type: string)
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
            Reduce Operator Tree:
@@ -1093,33 +1093,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = 'I DONT EXIST') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = 'I DONT EXIST') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -1218,19 +1218,19 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = 'I DONT EXIST') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = 'I DONT EXIST') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
            Reduce Operator Tree:
@@ -1339,33 +1339,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_double_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
                         sort order: +
                        Map-reduce partition columns: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: double)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: UDFToDouble(hr)
-                            Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -1464,33 +1464,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_double_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: double)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: double)
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: double)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: double)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: (UDFToDouble(hr) * 2.0)
-                            Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -1589,19 +1589,19 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_double_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
                         sort order: +
                        Map-reduce partition columns: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
            Reduce Operator Tree:
@@ -1699,19 +1699,19 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_double_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: double)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: double)
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
            Reduce Operator Tree:
@@ -1822,33 +1822,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_double_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: UDFToString(_col0) (type: string)
                         sort order: +
                        Map-reduce partition columns: UDFToString(_col0) (type: string)
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: UDFToString(_col0) (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: UDFToString((UDFToDouble(hr) * 2.0))
-                            Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -2112,17 +2112,17 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date_hour
                   filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0)) (type: boolean)
-                  Statistics: Num rows: 4 Data size: 1440 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0)) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string), hr (type: string)
                       outputColumnNames: _col0, _col2
-                      Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         sort order:
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                         value expressions: _col0 (type: string), _col2 (type: string)
             Execution mode: vectorized
         Reducer 2
@@ -2229,48 +2229,48 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date_hour
                   filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
-                  Statistics: Num rows: 4 Data size: 1440 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string), hr (type: string)
                       outputColumnNames: _col0, _col2
-                      Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col2 (type: string)
                         sort order: ++
                        Map-reduce partition columns: _col0 (type: string), _col2 (type: string)
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
                      Select Operator
                        expressions: _col2 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: hr
-                            Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -2371,33 +2371,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -2462,33 +2462,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: (date = '2008-04-08') (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (date = '2008-04-08') (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 4
             Execution mode: vectorized
         Map 4
@@ -2583,33 +2583,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: (date = '2008-04-08') (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (date = '2008-04-08') (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -2694,33 +2694,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Map 6
@@ -2728,19 +2728,19 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and (UDFToDouble(hr) = 11.0)) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and (UDFToDouble(hr) = 11.0)) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                      expressions: hr (type: string)
                      outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                      Reduce Output Operator
                        key expressions: _col0 (type: string)
                        sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
            Reduce Operator Tree:
@@ -2861,38 +2861,38 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Map 6
             Map Operator Tree:
                 TableScan
                   alias: srcpart_hour
                   filterExpr: (UDFToDouble(hr) = 13.0) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (UDFToDouble(hr) = 13.0) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 137 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 137 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 137 Basic stats: COMPLETE Column stats: NONE
             Execution mode: vectorized
         Reducer 2
            Reduce Operator Tree:
@@ -3688,33 +3688,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -3836,33 +3836,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Map 4
@@ -3870,33 +3870,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: hr
-                            Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -4007,48 +4007,48 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date_hour
                   filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
-                  Statistics: Num rows: 4 Data size: 1440 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string), hr (type: string)
                       outputColumnNames: _col0, _col2
-                      Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string), _col2 (type: string)
                         sort order: ++
                        Map-reduce partition columns: _col0 (type: string), _col2 (type: string)
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
                      Select Operator
                        expressions: _col2 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: hr
-                            Statistics: Num rows: 1 Data size: 360 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -4156,33 +4156,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = 'I DONT EXIST') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = 'I DONT EXIST') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -4266,33 +4266,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_double_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
                         sort order: +
                        Map-reduce partition columns: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: UDFToDouble(UDFToInteger((_col0 / UDFToDouble(2)))) (type: double)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: double)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: UDFToDouble(hr)
-                            Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -4387,33 +4387,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_double_hour
                   filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: hr (type: double)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: double)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: double)
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: double)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: double)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: hr (string)
                            Target Input: srcpart
                            Partition key expr: (UDFToDouble(hr) * 2.0)
-                            Statistics: Num rows: 1 Data size: 94 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 108 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -4670,33 +4670,33 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: ((date = '2008-04-08') and ds is not null) (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: string)
                         sort order: +
                        Map-reduce partition columns: _col0 (type: string)
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                      Select Operator
                        expressions: _col0 (type: string)
                        outputColumnNames: _col0
-                        Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                        Group By Operator
                          keys: _col0 (type: string)
                          mode: hash
                          outputColumnNames: _col0
-                          Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                          Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                          Dynamic Partitioning Event Operator
                            Target column: ds (string)
                            Target Input: srcpart
                            Partition key expr: ds
-                            Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                            Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                            Target Vertex: Map 1
             Execution mode: vectorized
         Reducer 2
@@ -4743,14 +4743,14 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: (date = '2008-04-08') (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE
                   Filter Operator
                     predicate: (date = '2008-04-08') (type: boolean)
-                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                     Select Operator
                       expressions: ds (type: string)
                       outputColumnNames: _col0
-                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE
                       Map Join Operator
                         condition map:
                              Left Outer Join0 to 1
@@ -4845,14 +4845,14 @@ STAGE PLANS:
                 TableScan
                   alias: srcpart_date
                   filterExpr: (date = '2008-04-08') (type: boolean)
-                  Statistics: Num rows: 2 Data size: 376
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (date = '2008-04-08') (type: boolean) - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Right Outer Join0 to 1 @@ -4960,33 +4960,33 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart Partition key expr: ds - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Execution mode: vectorized Map 4 @@ -4994,19 +4994,19 @@ STAGE PLANS: TableScan alias: srcpart_hour filterExpr: ((UDFToDouble(hour) = 11.0) and (UDFToDouble(hr) = 11.0)) (type: boolean) - Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((UDFToDouble(hour) = 11.0) and (UDFToDouble(hr) = 11.0)) (type: boolean) - Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: 
NONE + Statistics: Num rows: 1 Data size: 275 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 2 Execution mode: vectorized @@ -5093,14 +5093,14 @@ STAGE PLANS: TableScan alias: srcpart_date filterExpr: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 376 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 329 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((date = '2008-04-08') and ds is not null) (type: boolean) - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 164 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -5137,19 +5137,19 @@ STAGE PLANS: TableScan alias: srcpart_hour filterExpr: (UDFToDouble(hr) = 13.0) (type: boolean) - Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 275 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(hr) = 13.0) (type: boolean) - Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 137 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hr (type: string) outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 137 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 1 Data size: 172 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 137 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reducer 3 Execution mode: vectorized @@ -5470,48 +5470,48 @@ STAGE PLANS: TableScan alias: srcpart_date_hour filterExpr: (((date = '2008-04-08') or (date = '2008-04-09')) and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 4 Data size: 1440 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((date = '2008-04-08') or (date = '2008-04-09')) and (UDFToDouble(hour) = 11.0) and ds is not null and hr is not null) (type: boolean) - Statistics: Num rows: 2 Data size: 720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: ds (type: string), hr (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 2 Data size: 720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), UDFToDouble(_col2) (type: double) sort order: ++ Map-reduce partition columns: _col0 (type: string), UDFToDouble(_col2) (type: double) - Statistics: Num rows: 2 Data size: 720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 720 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: ds (string) Target Input: srcpart_orc Partition key expr: ds - Statistics: Num rows: 2 Data size: 720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Select Operator expressions: UDFToDouble(_col2) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: double) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Dynamic Partitioning Event Operator Target column: hr (int) Target Input: srcpart_orc Partition key expr: UDFToDouble(hr) - Statistics: Num rows: 2 Data size: 720 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 506 Basic stats: COMPLETE Column stats: NONE Target Vertex: Map 1 Reducer 2 Reduce Operator Tree:
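The vectorized_ptf and transform_ppr golden outputs that follow now print a COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} property alongside zeroed numFiles/numRows/rawDataSize/totalSize for freshly created, still-empty tables. As a reading aid, here is a minimal sketch of how a consumer of such a parameter map could test that flag; the parameter key and JSON shape are taken verbatim from the plan output, while the class and method names are hypothetical, not Hive's own code.

```java
import java.util.HashMap;
import java.util.Map;

import org.json.JSONException;
import org.json.JSONObject;

public class BasicStatsFlag {
  // Parameter key and JSON field exactly as they appear in the plan output above.
  private static final String COLUMN_STATS_ACCURATE = "COLUMN_STATS_ACCURATE";
  private static final String BASIC_STATS = "BASIC_STATS";

  /** Returns true only when the parameter map carries {"BASIC_STATS":"true"}. */
  public static boolean basicStatsUpToDate(Map<String, String> params) {
    String acc = params.get(COLUMN_STATS_ACCURATE);
    if (acc == null) {
      return false; // flag absent: treat the stats as stale
    }
    try {
      return "true".equals(new JSONObject(acc).optString(BASIC_STATS));
    } catch (JSONException e) {
      return false; // legacy plain TRUE/FALSE or garbage fails the parse: be conservative
    }
  }

  public static void main(String[] args) {
    Map<String, String> params = new HashMap<>();
    params.put(COLUMN_STATS_ACCURATE, "{\"BASIC_STATS\":\"true\"}");
    System.out.println(basicStatsUpToDate(params)); // prints true
  }
}
```

Parsing the JSON rather than comparing the raw string keeps such a check tolerant of key ordering and of extra fields, and anything unparsable simply falls through to the conservative "stale" answer.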
diff --git a/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out index 3d1f22f..615d100 100644 --- a/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out +++ b/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out @@ -6094,15 +6094,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns p_mfgr,p_name,p_size,r,dr,s columns.comments columns.types string:string:int:int:int:double #### A masked pattern was here #### name default.part_4 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_4 @@ -6212,15 +6217,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1 columns.comments columns.types string:string:int:int:int:int:double:int #### A masked pattern was here #### name default.part_5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_5 @@ -6240,15 +6250,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns p_mfgr,p_name,p_size,r,dr,s columns.comments columns.types string:string:int:int:int:double #### A masked pattern was here #### name default.part_4 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_4 @@ -6266,15 +6281,20 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1 columns.comments columns.types string:string:int:int:int:int:double:int #### A masked pattern was here #### name default.part_5 + numFiles 0 + numRows 0 + rawDataSize 0 serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.part_5 diff --git a/ql/src/test/results/clientpositive/transform_ppr1.q.out b/ql/src/test/results/clientpositive/transform_ppr1.q.out index a5c5dbe..697c2d0 100644 --- a/ql/src/test/results/clientpositive/transform_ppr1.q.out +++ b/ql/src/test/results/clientpositive/transform_ppr1.q.out @@ -171,17 +171,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -217,17 +222,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -263,17 +273,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles
0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -309,17 +324,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/transform_ppr2.q.out b/ql/src/test/results/clientpositive/transform_ppr2.q.out index 5224787..2f1502b 100644 --- a/ql/src/test/results/clientpositive/transform_ppr2.q.out +++ b/ql/src/test/results/clientpositive/transform_ppr2.q.out @@ -173,17 +173,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -219,17 +224,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments 'default','default' columns.types string:string #### A masked pattern was here #### name default.srcpart + numFiles 0 + numRows 0 partition_columns ds/hr partition_columns.types string:string + rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart diff --git a/ql/src/test/results/clientpositive/truncate_column.q.out b/ql/src/test/results/clientpositive/truncate_column.q.out index adbddfa..2efba75 100644 --- a/ql/src/test/results/clientpositive/truncate_column.q.out +++ b/ql/src/test/results/clientpositive/truncate_column.q.out @@ -104,6 +104,8 @@ Retention: 0 Table Type: MANAGED_TABLE Table Parameters: numFiles 1 + numRows 10 + rawDataSize 94 totalSize 150 #### A masked pattern was here #### @@ -176,6 +178,8 @@ Retention: 0 Table Type: MANAGED_TABLE Table Parameters: numFiles 1 + numRows 10 + rawDataSize 94 totalSize 75 #### A masked 
pattern was here #### @@ -238,6 +242,8 @@ Retention: 0 Table Type: MANAGED_TABLE Table Parameters: numFiles 1 + numRows 10 + rawDataSize 94 totalSize 75 #### A masked pattern was here #### @@ -375,6 +381,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 1 + numRows 10 + rawDataSize 94 totalSize 150 #### A masked pattern was here #### @@ -438,6 +446,8 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### numFiles 1 + numRows 10 + rawDataSize 94 totalSize 75 #### A masked pattern was here #### @@ -581,6 +591,8 @@ Table: test_tab_part #### A masked pattern was here #### Partition Parameters: numFiles 1 + numRows 10 + rawDataSize 94 totalSize 150 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out b/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out index 1fa437b..9e118eb 100644 --- a/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out +++ b/ql/src/test/results/clientpositive/truncate_column_list_bucket.q.out @@ -114,8 +114,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.test_tab numFiles 2 + numRows 500 partition_columns part partition_columns.types string + rawDataSize 4812 serialization.ddl struct test_tab { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe @@ -126,17 +128,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.test_tab + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct test_tab { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.test_tab @@ -144,16 +151,16 @@ STAGE PLANS: Processor Tree: TableScan alias: test_tab - Statistics: Num rows: 17 Data size: 1761 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (key = '484') (type: boolean) - Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: '484' (type: string), value (type: string), '1' (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE ListSink PREHOOK: query: SELECT * FROM test_tab WHERE part = '1' AND key = '484' @@ -218,8 +225,10 @@ STAGE PLANS: #### A masked pattern was here #### name default.test_tab numFiles 2 + numRows 500 partition_columns part partition_columns.types string + rawDataSize 4812 serialization.ddl struct test_tab { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe @@ -230,17 +239,22 @@ STAGE PLANS: input format: org.apache.hadoop.hive.ql.io.RCFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.RCFileOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,value columns.comments columns.types string:string #### A masked pattern was here #### name default.test_tab + numFiles 0 + numRows 0 partition_columns part partition_columns.types string + rawDataSize 0 serialization.ddl struct test_tab { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.test_tab @@ -248,16 +262,16 @@ STAGE PLANS: Processor Tree: TableScan alias: test_tab - Statistics: Num rows: 17 Data size: 1761 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 500 Data size: 4812 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (key = '0') (type: boolean) - Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: '0' (type: string), value (type: string), '1' (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 8 Data size: 828 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 250 Data size: 2406 Basic stats: COMPLETE Column stats: NONE ListSink PREHOOK: query: SELECT * FROM test_tab WHERE part = '1' AND key = '0' diff --git a/ql/src/test/results/clientpositive/unicode_notation.q.out b/ql/src/test/results/clientpositive/unicode_notation.q.out index 52da674..37848b0 100644 --- a/ql/src/test/results/clientpositive/unicode_notation.q.out +++ b/ql/src/test/results/clientpositive/unicode_notation.q.out @@ -27,6 +27,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -73,6 +78,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -119,6 +129,11 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 0 + numRows 0 + rawDataSize 0 + totalSize 0 #### A masked pattern was here #### # Storage Information diff --git a/ql/src/test/results/clientpositive/union22.q.out b/ql/src/test/results/clientpositive/union22.q.out index 5309c71..5e54352 100644 --- a/ql/src/test/results/clientpositive/union22.q.out +++ b/ql/src/test/results/clientpositive/union22.q.out @@ -267,17 +267,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k0,k1,k2,k3,k4,k5 columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -392,17 +397,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -437,17 +447,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k0,k1,k2,k3,k4,k5 columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -484,17 +499,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -517,17 +537,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -587,17 +612,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + 
COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k0,k1,k2,k3,k4,k5 columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -617,17 +647,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -714,17 +749,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -759,17 +799,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k0,k1,k2,k3,k4,k5 columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta
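The union24 golden outputs that follow change in the opposite direction: the src2–src5 helper tables lose their numRows/rawDataSize properties, so the scans fall back to size-based estimates (Num rows: 309 becomes 16 or 17 for 1791 bytes of data), and the (key < 10) filter then applies the divide-by-three selectivity these outputs exhibit. A rough model of that arithmetic, with an assumed ~105-byte row width (both constants are assumptions for illustration, not the planner's actual code), reproduces the 17 → 5 row counts seen below.

```java
public class RowEstimate {
  /** Fallback row count from on-disk size when numRows is absent or stale. */
  static long rowsFromSize(long totalFileSize, long estimatedRowWidth) {
    if (estimatedRowWidth <= 0) {
      return 1; // guard: plans never report fewer than one row
    }
    return Math.max(1, totalFileSize / estimatedRowWidth);
  }

  /** Default selectivity the outputs below show for a range predicate such as key < 10. */
  static long rowsAfterFilter(long inputRows) {
    return Math.max(1, inputRows / 3);
  }

  public static void main(String[] args) {
    // Mirrors the src5 scan below: 1791 bytes at ~105 bytes/row -> 17 rows,
    // then the (key < 10) filter -> 5 rows, matching the new golden output.
    long scanned = rowsFromSize(1791, 105);
    System.out.println(scanned + " -> " + rowsAfterFilter(scanned));
  }
}
```

The Data size figures in the plan are estimated separately, so only the row counts are modeled here.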
diff --git a/ql/src/test/results/clientpositive/union24.q.out b/ql/src/test/results/clientpositive/union24.q.out index 993b838..10d96c9 100644 --- a/ql/src/test/results/clientpositive/union24.q.out +++ b/ql/src/test/results/clientpositive/union24.q.out @@ -198,28 +198,28 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src5 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) auto parallelism: false @@ -232,7 +232,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -240,8 +239,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -252,7 +249,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -260,8 +256,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -279,7 +273,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 51 Data size: 244 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 210 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -303,24 +297,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src2 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1887 Basic
stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -339,24 +333,24 @@ STAGE PLANS: MultiFileSpray: false TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -375,24 +369,24 @@ STAGE PLANS: MultiFileSpray: false TableScan alias: src4 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -412,13 +406,13 @@ STAGE PLANS: TableScan GatherStats: false Union - Statistics: Num rows: 360 Data size: 1726 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 360 Data size: 1726 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1887 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -464,7 +458,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -472,8 +465,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -484,7 +475,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -492,8 +482,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -508,7 +496,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -516,8 +503,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -528,7 +513,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -536,8 +520,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -552,7 +534,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -560,8 +541,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -572,7 +551,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -580,8 +558,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -796,42 +772,42 @@ STAGE PLANS: 
Map Operator Tree: TableScan alias: a - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: 0 auto parallelism: false TableScan alias: b - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE tag: 1 value expressions: _col1 (type: bigint) auto parallelism: false @@ -844,7 +820,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -852,8 +827,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -864,7 +837,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -872,8 +844,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -888,7 +858,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ 
-896,8 +865,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -908,7 +875,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -916,8 +882,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -938,11 +902,11 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col2 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col2 (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -966,24 +930,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src2 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -1002,24 +966,24 @@ STAGE PLANS: MultiFileSpray: false TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: 
COMPLETE Column stats: NONE Union - Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -1039,13 +1003,13 @@ STAGE PLANS: TableScan GatherStats: false Union - Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 319 Data size: 1531 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 1696 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -1091,7 +1055,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1099,8 +1062,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1111,7 +1072,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1119,8 +1079,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1135,7 +1093,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1143,8 +1100,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1155,7 +1110,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1163,8 +1117,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1373,42 +1325,42 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 309 Data size: 
1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: 0 auto parallelism: false TableScan alias: b - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: 1 auto parallelism: false Path -> Alias: @@ -1420,7 +1372,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1428,8 +1379,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1440,7 +1389,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1448,8 +1396,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1464,7 +1410,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1472,8 +1417,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - 
rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1484,7 +1427,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1492,8 +1434,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1514,13 +1454,13 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -1549,7 +1489,7 @@ STAGE PLANS: null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) auto parallelism: false @@ -1585,7 +1525,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 56 Data size: 269 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -1609,24 +1549,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src2 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -1645,24 +1585,24 @@ STAGE PLANS: MultiFileSpray: false TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -1682,13 +1622,13 @@ STAGE PLANS: TableScan GatherStats: false Union - Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 262 Data size: 1257 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 12 Data size: 1349 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -1734,7 +1674,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1742,8 +1681,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1754,7 +1691,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1762,8 +1698,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1778,7 +1712,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1786,8 +1719,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1798,7 +1729,6 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -1806,8 +1736,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe diff --git a/ql/src/test/results/clientpositive/union27.q.out b/ql/src/test/results/clientpositive/union27.q.out index f023360..babe3dc 100644 --- a/ql/src/test/results/clientpositive/union27.q.out +++ b/ql/src/test/results/clientpositive/union27.q.out @@ -50,54 +50,54 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE TableScan alias: dim_pho - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) TableScan alias: jackson_sev_add - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 
250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -107,14 +107,14 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col1, _col2 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 550 Data size: 5843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/union31.q.out b/ql/src/test/results/clientpositive/union31.q.out index bb35d5c..788141e 100644 --- a/ql/src/test/results/clientpositive/union31.q.out +++ b/ql/src/test/results/clientpositive/union31.q.out @@ -86,39 +86,39 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Select Operator expressions: _col1 (type: string) outputColumnNames: _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 
Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -127,39 +127,39 @@ STAGE PLANS: serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Select Operator expressions: _col1 (type: string) outputColumnNames: _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -172,14 +172,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -207,7 +207,7 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num 
rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -215,14 +215,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -357,22 +357,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -380,7 +380,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -393,38 +393,38 @@ STAGE PLANS: Map Operator Tree: TableScan Union - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) TableScan Union - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 
Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Forward - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(VALUE._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -435,14 +435,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -480,22 +480,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -503,7 +503,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -667,22 +667,22 @@ STAGE PLANS: Map Operator 
Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -690,11 +690,11 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -707,50 +707,50 @@ STAGE PLANS: Map Operator Tree: TableScan Union - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE TableScan alias: t2 - Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Forward - Statistics: Num 
rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -761,14 +761,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat diff --git a/ql/src/test/results/clientpositive/union32.q.out b/ql/src/test/results/clientpositive/union32.q.out index a3fefa8..cba48d3 100644 --- a/ql/src/test/results/clientpositive/union32.q.out +++ b/ql/src/test/results/clientpositive/union32.q.out @@ -58,38 +58,38 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(key)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 
Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: double) sort order: + - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: double) outputColumnNames: _col0 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -162,34 +162,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -198,11 +198,11 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -215,26 
+215,26 @@ STAGE PLANS: Map Operator Tree: TableScan Union - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -319,34 +319,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -355,11 
+355,11 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -372,26 +372,26 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan Union - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -476,34 +476,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select 
Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -512,11 +512,11 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(UDFToDouble(_col1)) (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -529,26 +529,26 @@ STAGE PLANS: Map Operator Tree: TableScan Union - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan alias: b - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double), key (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -633,34 +633,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key 
expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -669,11 +669,11 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToDouble(_col1) (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -686,26 +686,26 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe TableScan Union - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git 
a/ql/src/test/results/clientpositive/unionDistinct_1.q.out b/ql/src/test/results/clientpositive/unionDistinct_1.q.out index 61bfa74..9ca5ef6 100644 --- a/ql/src/test/results/clientpositive/unionDistinct_1.q.out +++ b/ql/src/test/results/clientpositive/unionDistinct_1.q.out @@ -7015,17 +7015,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k0,k1,k2,k3,k4,k5 columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -7140,17 +7145,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -7185,17 +7195,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k0,k1,k2,k3,k4,k5 columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -7303,17 +7318,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k0,k1,k2,k3,k4,k5 columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
name: default.dst_union22_delta @@ -7340,17 +7360,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -7369,17 +7394,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -7466,17 +7496,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k1,k2,k3,k4 columns.comments columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -7511,17 +7546,22 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: + COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns k0,k1,k2,k3,k4,k5 columns.comments columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta + numFiles 0 + numRows 0 partition_columns ds partition_columns.types string + rawDataSize 0 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 0 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -8621,56 +8661,56 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src2 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: 
(UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false Path -> Alias: @@ -8682,7 +8722,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8690,8 +8729,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8702,7 +8739,6 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8710,8 +8746,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8726,7 +8760,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8734,8 +8767,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8746,7 +8777,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8754,8 +8784,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8773,7 +8801,7 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -8798,45 +8826,45 @@ STAGE PLANS: TableScan GatherStats: false Union - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false TableScan alias: src4 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: 
Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false Path -> Alias: @@ -8868,7 +8896,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8876,8 +8903,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8888,7 +8913,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -8896,8 +8920,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -8915,7 +8937,7 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -8940,35 +8962,35 @@ STAGE PLANS: TableScan GatherStats: false Union - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false TableScan GatherStats: false Union - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 
(type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 154 Data size: 738 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 769 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false Path -> Alias: @@ -9022,13 +9044,13 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 77 Data size: 369 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 329 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 77 Data size: 369 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 329 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -9051,28 +9073,28 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src5 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) auto parallelism: false @@ -9085,7 +9107,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9093,8 +9114,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9105,7 +9124,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns 
key,count columns.comments @@ -9113,8 +9131,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9132,7 +9148,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 51 Data size: 244 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 210 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -9358,56 +9374,56 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src2 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null 
sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false Path -> Alias: @@ -9419,7 +9435,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9427,8 +9442,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9439,7 +9452,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9447,8 +9459,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9463,7 +9473,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9471,8 +9480,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9483,7 +9490,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9491,8 +9497,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9510,7 +9514,7 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -9539,16 +9543,16 @@ STAGE PLANS: $hdt$_0-subquery2:$hdt$_0-subquery2:$hdt$_0:a TableScan alias: a - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data 
size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: string) @@ -9561,32 +9565,32 @@ STAGE PLANS: TableScan GatherStats: false Union - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false TableScan alias: b - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -9595,24 +9599,24 @@ STAGE PLANS: 1 _col0 (type: string) outputColumnNames: _col0, _col2 Position of Big Table: 1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col2 (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 216 Data size: 1037 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1137 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false Local Work: @@ -9646,7 +9650,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count 
columns.comments @@ -9654,8 +9657,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9666,7 +9667,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9674,8 +9674,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9690,7 +9688,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9698,8 +9695,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9710,7 +9705,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -9718,8 +9712,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -9737,13 +9729,13 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 108 Data size: 518 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 568 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 108 Data size: 518 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 568 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -9967,56 +9959,56 @@ STAGE PLANS: Map Operator Tree: TableScan alias: src2 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: 
Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false TableScan alias: src3 - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), count (type: bigint) outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 206 Data size: 988 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 1118 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false Path -> Alias: @@ -10028,7 +10020,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -10036,8 +10027,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -10048,7 +10037,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -10056,8 +10044,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src2 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -10072,7 +10058,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -10080,8 +10065,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -10092,7 +10075,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -10100,8 +10082,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src3 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -10119,7 +10099,7 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 559 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -10144,35 +10124,35 @@ STAGE PLANS: TableScan GatherStats: false Union - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false TableScan GatherStats: false Union - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) null sort order: aa sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 159 Data size: 763 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 790 Basic stats: COMPLETE Column stats: NONE tag: -1 auto parallelism: false Path -> Alias: @@ -10226,13 +10206,13 @@ STAGE PLANS: keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 79 Data size: 379 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 338 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 #### A masked pattern 
was here #### NumFilesPerFileSink: 1 - Statistics: Num rows: 79 Data size: 379 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 338 Basic stats: COMPLETE Column stats: NONE #### A masked pattern was here #### table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat @@ -10260,16 +10240,16 @@ STAGE PLANS: $hdt$_0-subquery2:$hdt$_0-subquery2:$hdt$_0:$hdt$_0:a TableScan alias: a - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: string) @@ -10281,16 +10261,16 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 309 Data size: 1482 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 17 Data size: 1791 Basic stats: COMPLETE Column stats: NONE GatherStats: false Filter Operator isSamplingPred: false predicate: (UDFToDouble(key) < 10.0) (type: boolean) - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 103 Data size: 494 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 526 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -10299,19 +10279,19 @@ STAGE PLANS: 1 _col0 (type: string) outputColumnNames: _col0 Position of Big Table: 1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) null sort order: a sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 113 Data size: 543 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 5 Data size: 578 Basic stats: COMPLETE Column stats: NONE tag: -1 value expressions: _col1 (type: bigint) auto parallelism: false @@ -10326,7 +10306,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -10334,8 +10313,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -10346,7 +10323,6 @@ STAGE PLANS: input format: 
org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -10354,8 +10330,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src4 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -10370,7 +10344,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -10378,8 +10351,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -10390,7 +10361,6 @@ STAGE PLANS: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat properties: - COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bucket_count -1 columns key,count columns.comments @@ -10398,8 +10368,6 @@ STAGE PLANS: #### A masked pattern was here #### name default.src5 numFiles 1 - numRows 309 - rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -10417,7 +10385,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 56 Data size: 269 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false GlobalTableId: 0 @@ -12018,54 +11986,54 @@ STAGE PLANS: Map Operator Tree: TableScan alias: dim_pho - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE TableScan alias: jackson_sev_add - 
Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 5610 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 14 Data size: 2805 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -12083,14 +12051,14 @@ STAGE PLANS: $hdt$_0:a TableScan alias: a - Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 58 Data size: 5812 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (UDFToDouble(key) = 97.0) (type: boolean) - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 2906 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col0 (type: string) @@ -12107,14 +12075,14 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col1, _col2 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string), _col2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 275 Data size: 2921 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 31 Data size: 3196 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13476,65 +13444,65 @@ STAGE PLANS: 
Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Union - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output 
Operator compressed: false - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -13543,13 +13511,13 @@ STAGE PLANS: Select Operator expressions: _col1 (type: string) outputColumnNames: _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -13578,7 +13546,7 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -13586,14 +13554,14 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -13729,22 +13697,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -13752,7 +13720,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 
80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -13765,36 +13733,36 @@ STAGE PLANS: Map Operator Tree: TableScan Union - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE TableScan Union - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string), _col1 (type: bigint) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: bigint) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: bigint) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -13810,24 +13778,24 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Forward - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(VALUE._col0) keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -13838,14 +13806,14 @@ STAGE 
PLANS: keys: KEY._col0 (type: string) mode: complete outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), UDFToInteger(_col1) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 14 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.TextInputFormat output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -13883,22 +13851,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t2 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -13906,7 +13874,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -14071,22 +14039,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: t1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: key - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(1) keys: key (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint) Reduce Operator Tree: Group By Operator @@ -14094,7 +14062,7 @@ STAGE PLANS: keys: KEY._col0 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: 
-              Statistics: Num rows: 5 Data size: 35 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
                 table:
@@ -14107,46 +14075,46 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            Union
-             Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE
             Group By Operator
               keys: _col0 (type: string), _col1 (type: bigint)
               mode: hash
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: string), _col1 (type: bigint)
                sort order: ++
                Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint)
-               Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: t2
-           Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: string), cnt (type: bigint)
              outputColumnNames: _col0, _col1
-             Statistics: Num rows: 6 Data size: 18 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 24 Basic stats: COMPLETE Column stats: NONE
             Union
-              Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: string), _col1 (type: bigint)
                mode: hash
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string), _col1 (type: bigint)
                 sort order: ++
                 Map-reduce partition columns: _col0 (type: string), _col1 (type: bigint)
-                Statistics: Num rows: 11 Data size: 53 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 104 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Group By Operator
          keys: KEY._col0 (type: string), KEY._col1 (type: bigint)
          mode: mergepartial
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: _col0 (type: string)
            outputColumnNames: _col0
-            Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
            File Output Operator
              compressed: false
              table:
@@ -14162,23 +14130,23 @@ STAGE PLANS:
            key expressions: _col0 (type: string)
            sort order: +
            Map-reduce partition columns: _col0 (type: string)
-            Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Forward
-          Statistics: Num rows: 5 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
          Group By Operator
            aggregations: count(1)
            keys: KEY._col0 (type: string)
            mode: complete
            outputColumnNames: _col0, _col1
-            Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
              File Output Operator
                compressed: false
-                Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
                table:
                    input format: org.apache.hadoop.mapred.TextInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -14189,14 +14157,14 @@ STAGE PLANS:
          keys: KEY._col0 (type: string)
          mode: complete
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: _col0 (type: string), UDFToInteger(_col1) (type: int)
            outputColumnNames: _col0, _col1
-            Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
            File Output Operator
              compressed: false
-              Statistics: Num rows: 2 Data size: 9 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 52 Basic stats: COMPLETE Column stats: NONE
              table:
                  input format: org.apache.hadoop.mapred.TextInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
@@ -14372,48 +14340,48 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: t1
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: UDFToDouble(key) (type: double)
              outputColumnNames: _col0
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
             Union
-              Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: double)
                mode: hash
                outputColumnNames: _col0
-               Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: double)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: double)
-                Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: t2
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: UDFToDouble(UDFToLong(key)) (type: double)
              outputColumnNames: _col0
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
             Union
-              Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: double)
                mode: hash
                outputColumnNames: _col0
-               Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: double)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: double)
-                Statistics: Num rows: 20 Data size: 140 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 160 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Group By Operator
          keys: KEY._col0 (type: double)
          mode: mergepartial
          outputColumnNames: _col0
-          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            table:
@@ -14428,15 +14396,15 @@ STAGE PLANS:
          Reduce Output Operator
            key expressions: _col0 (type: double)
            sort order: +
-            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: double)
          outputColumnNames: _col0
-          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -14500,14 +14468,14 @@ STAGE PLANS:
        $hdt$_0-subquery1:$hdt$_0-subquery1:$hdt$_0:a 
          TableScan
            alias: a
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                HashTable Sink Operator
                  keys:
                    0 _col0 (type: string)
@@ -14518,33 +14486,33 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: b
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: UDFToDouble(key) (type: double)
              outputColumnNames: _col0
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
             Union
-              Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: double)
                mode: hash
                outputColumnNames: _col0
-               Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: double)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: double)
-                Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: b
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                Map Join Operator
                  condition map:
                       Inner Join 0 to 1
@@ -14552,23 +14520,23 @@ STAGE PLANS:
                    0 _col0 (type: string)
                    1 _col0 (type: string)
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: UDFToDouble(UDFToLong(_col0)) (type: double)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                    Union
-                      Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: double)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: double)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: double)
-                         Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
@@ -14576,10 +14544,10 @@ STAGE PLANS:
          keys: KEY._col0 (type: double)
          mode: mergepartial
          outputColumnNames: _col0
-          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -14643,14 +14611,14 @@ STAGE PLANS:
        $hdt$_0-subquery2:$hdt$_0-subquery2:$hdt$_0:a 
          TableScan
            alias: a
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                HashTable Sink Operator
                  keys:
                    0 _col0 (type: string)
@@ -14661,33 +14629,33 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: t2
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: UDFToDouble(key) (type: double)
              outputColumnNames: _col0
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
             Union
-              Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: double)
                mode: hash
                outputColumnNames: _col0
-               Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: double)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: double)
-                Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: t2
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                Map Join Operator
                  condition map:
                       Inner Join 0 to 1
@@ -14695,23 +14663,23 @@ STAGE PLANS:
                    0 _col0 (type: string)
                    1 _col0 (type: string)
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: UDFToDouble(UDFToLong(_col0)) (type: double)
                    outputColumnNames: _col0
-                    Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                    Union
-                      Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: double)
                        mode: hash
                        outputColumnNames: _col0
-                        Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: double)
                         sort order: +
                         Map-reduce partition columns: _col0 (type: double)
-                         Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
@@ -14719,10 +14687,10 @@ STAGE PLANS:
          keys: KEY._col0 (type: double)
          mode: mergepartial
          outputColumnNames: _col0
-          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -14786,14 +14754,14 @@ STAGE PLANS:
        $hdt$_0-subquery1:$hdt$_0-subquery1:$hdt$_0:a 
          TableScan
            alias: a
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                HashTable Sink Operator
                  keys:
                    0 _col0 (type: string)
@@ -14804,33 +14772,33 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: b
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: UDFToDouble(key) (type: double), key (type: string)
              outputColumnNames: _col0, _col1
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
             Union
-              Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: double), _col1 (type: string)
                mode: hash
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: double), _col1 (type: string)
                 sort order: ++
                 Map-reduce partition columns: _col0 (type: double), _col1 (type: string)
-                Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: b
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                Map Join Operator
                  condition map:
                       Inner Join 0 to 1
@@ -14838,23 +14806,23 @@ STAGE PLANS:
                    0 _col0 (type: string)
                    1 _col0 (type: string)
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToString(UDFToDouble(_col1)) (type: string)
                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                    Union
-                      Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: double), _col1 (type: string)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: double), _col1 (type: string)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: double), _col1 (type: string)
-                         Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
@@ -14862,10 +14830,10 @@ STAGE PLANS:
          keys: KEY._col0 (type: double), KEY._col1 (type: string)
          mode: mergepartial
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -14935,14 +14903,14 @@ STAGE PLANS:
        $hdt$_0-subquery2:$hdt$_0-subquery2:$hdt$_0:a 
          TableScan
            alias: a
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                HashTable Sink Operator
                  keys:
                    0 _col0 (type: string)
@@ -14953,33 +14921,33 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: t2
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: UDFToDouble(key) (type: double), UDFToDouble(key) (type: double)
              outputColumnNames: _col0, _col1
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
             Union
-              Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: double), _col1 (type: double)
                mode: hash
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: double), _col1 (type: double)
                 sort order: ++
                 Map-reduce partition columns: _col0 (type: double), _col1 (type: double)
-                Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: t2
-           Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-             Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 80 Basic stats: COMPLETE Column stats: NONE
                Map Join Operator
                  condition map:
                       Inner Join 0 to 1
@@ -14987,23 +14955,23 @@ STAGE PLANS:
                    0 _col0 (type: string)
                    1 _col0 (type: string)
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                  Select Operator
                    expressions: UDFToDouble(UDFToLong(_col0)) (type: double), UDFToDouble(_col1) (type: double)
                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 11 Data size: 77 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 1 Data size: 88 Basic stats: COMPLETE Column stats: NONE
                    Union
-                      Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
                      Group By Operator
                        keys: _col0 (type: double), _col1 (type: double)
                        mode: hash
                        outputColumnNames: _col0, _col1
-                        Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: double), _col1 (type: double)
                         sort order: ++
                         Map-reduce partition columns: _col0 (type: double), _col1 (type: double)
-                         Statistics: Num rows: 21 Data size: 147 Basic stats: COMPLETE Column stats: NONE
+                         Statistics: Num rows: 2 Data size: 168 Basic stats: COMPLETE Column stats: NONE
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
@@ -15011,10 +14979,10 @@ STAGE PLANS:
          keys: KEY._col0 (type: double), KEY._col1 (type: double)
          mode: mergepartial
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 10 Data size: 70 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 84 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/union_fast_stats.q.out b/ql/src/test/results/clientpositive/union_fast_stats.q.out
index f0879af..10de5dc 100644
--- a/ql/src/test/results/clientpositive/union_fast_stats.q.out
+++ b/ql/src/test/results/clientpositive/union_fast_stats.q.out
@@ -176,10 +176,7 @@ Retention:          0
 #### A masked pattern was here ####
 Table Type:          MANAGED_TABLE
 Table Parameters:
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	3
-	numRows	15
-	rawDataSize	3483
 	totalSize	4003
 #### A masked pattern was here ####
@@ -509,10 +506,7 @@ Retention:          0
 #### A masked pattern was here ####
 Table Type:          MANAGED_TABLE
 Table Parameters:
-	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	1
-	numRows	15
-	rawDataSize	3483
 	totalSize	3223
 #### A masked pattern was here ####
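The union_fast_stats.q.out hunks above drop COLUMN_STATS_ACCURATE, numRows and rawDataSize from the table parameters while keeping numFiles and totalSize, and the plan hunks before them shrink estimates such as "Num rows: 10 Data size: 70" to "Num rows: 1 Data size: 80". A plausible reading, offered as an assumption rather than a statement of Hive internals, is that once numRows no longer claims to be accurate the planner falls back to a size-based guess clamped to at least one row. A minimal Java sketch of that arithmetic (all names hypothetical, not Hive API):

// Illustrative only: the kind of size-based row estimate a planner can
// fall back to when numRows is absent or untrusted.
public final class RowCountFallback {
  private RowCountFallback() {}

  // Estimate rows from on-disk size; clamp to at least one row so the
  // plan never claims an empty input it has not verified.
  public static long estimateNumRows(long totalSize, long assumedRowWidth) {
    if (assumedRowWidth <= 0) {
      return 1L; // no usable schema width: assume a single row
    }
    return Math.max(1L, totalSize / assumedRowWidth);
  }

  public static void main(String[] args) {
    // e.g. totalSize 80 bytes with an assumed 100-byte row -> 1 row,
    // matching the "Num rows: 1 Data size: 80" lines in the new plans.
    System.out.println(estimateNumRows(80L, 100L));
  }
}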
diff --git a/ql/src/test/results/clientpositive/union_ppr.q.out b/ql/src/test/results/clientpositive/union_ppr.q.out
index 11c6dce..cba62f2 100644
--- a/ql/src/test/results/clientpositive/union_ppr.q.out
+++ b/ql/src/test/results/clientpositive/union_ppr.q.out
@@ -204,17 +204,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
 #### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
 #### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
@@ -250,17 +255,22 @@ STAGE PLANS:
              input format: org.apache.hadoop.mapred.TextInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
              properties:
+               COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
                bucket_count -1
                columns key,value
                columns.comments 'default','default'
                columns.types string:string
 #### A masked pattern was here ####
                name default.srcpart
+               numFiles 0
+               numRows 0
                partition_columns ds/hr
                partition_columns.types string:string
+               rawDataSize 0
                serialization.ddl struct srcpart { string key, string value}
                serialization.format 1
                serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+               totalSize 0
 #### A masked pattern was here ####
              serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
              name: default.srcpart
diff --git a/ql/src/test/results/clientpositive/union_remove_1.q.out b/ql/src/test/results/clientpositive/union_remove_1.q.out
index 1bd471d..0b6987e 100644
--- a/ql/src/test/results/clientpositive/union_remove_1.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_1.q.out
@@ -193,6 +193,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	40
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_10.q.out b/ql/src/test/results/clientpositive/union_remove_10.q.out
index 14645f0..c8a9dd7 100644
--- a/ql/src/test/results/clientpositive/union_remove_10.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_10.q.out
@@ -252,6 +252,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	3
+	numRows	0
+	rawDataSize	0
 	totalSize	271
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_11.q.out b/ql/src/test/results/clientpositive/union_remove_11.q.out
index 5696383..f36f0c9 100644
--- a/ql/src/test/results/clientpositive/union_remove_11.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_11.q.out
@@ -241,6 +241,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	1
+	numRows	0
+	rawDataSize	0
 	totalSize	115
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_12.q.out b/ql/src/test/results/clientpositive/union_remove_12.q.out
index 2b42538..51449a9 100644
--- a/ql/src/test/results/clientpositive/union_remove_12.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_12.q.out
@@ -240,6 +240,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	194
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_13.q.out b/ql/src/test/results/clientpositive/union_remove_13.q.out
index c7063cd..b18660b 100644
--- a/ql/src/test/results/clientpositive/union_remove_13.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_13.q.out
@@ -263,6 +263,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	192
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_14.q.out b/ql/src/test/results/clientpositive/union_remove_14.q.out
index a754dd4..c35d97b 100644
--- a/ql/src/test/results/clientpositive/union_remove_14.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_14.q.out
@@ -242,6 +242,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	194
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_15.q.out b/ql/src/test/results/clientpositive/union_remove_15.q.out
index a259df8..5933b60 100644
--- a/ql/src/test/results/clientpositive/union_remove_15.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_15.q.out
@@ -216,6 +216,11 @@ Retention:          0
 #### A masked pattern was here ####
 Table Type:          MANAGED_TABLE
 Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles	0
+	numRows	0
+	rawDataSize	0
+	totalSize	0
 #### A masked pattern was here ####
 
 # Storage Information
diff --git a/ql/src/test/results/clientpositive/union_remove_16.q.out b/ql/src/test/results/clientpositive/union_remove_16.q.out
index c7a08f3..48c5232 100644
--- a/ql/src/test/results/clientpositive/union_remove_16.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_16.q.out
@@ -248,6 +248,11 @@ Retention:          0
 #### A masked pattern was here ####
 Table Type:          MANAGED_TABLE
 Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles	0
+	numRows	0
+	rawDataSize	0
+	totalSize	0
 #### A masked pattern was here ####
 
 # Storage Information
diff --git a/ql/src/test/results/clientpositive/union_remove_17.q.out b/ql/src/test/results/clientpositive/union_remove_17.q.out
index 688e365..4b8e2f5 100644
--- a/ql/src/test/results/clientpositive/union_remove_17.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_17.q.out
@@ -171,6 +171,11 @@ Retention:          0
 #### A masked pattern was here ####
 Table Type:          MANAGED_TABLE
 Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles	0
+	numRows	0
+	rawDataSize	0
+	totalSize	0
 #### A masked pattern was here ####
 
 # Storage Information
diff --git a/ql/src/test/results/clientpositive/union_remove_18.q.out b/ql/src/test/results/clientpositive/union_remove_18.q.out
index 96daa12..4513577 100644
--- a/ql/src/test/results/clientpositive/union_remove_18.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_18.q.out
@@ -226,6 +226,11 @@ Retention:          0
 #### A masked pattern was here ####
 Table Type:          MANAGED_TABLE
 Table Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
+	numFiles	0
+	numRows	0
+	rawDataSize	0
+	totalSize	0
 #### A masked pattern was here ####
 
 # Storage Information
diff --git a/ql/src/test/results/clientpositive/union_remove_19.q.out b/ql/src/test/results/clientpositive/union_remove_19.q.out
index 1a40efa..3d8dcca 100644
--- a/ql/src/test/results/clientpositive/union_remove_19.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_19.q.out
@@ -197,6 +197,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	40
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_2.q.out b/ql/src/test/results/clientpositive/union_remove_2.q.out
index e5de3c6..9fc6d55 100644
--- a/ql/src/test/results/clientpositive/union_remove_2.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_2.q.out
@@ -204,6 +204,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	3
+	numRows	0
+	rawDataSize	0
 	totalSize	68
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_20.q.out b/ql/src/test/results/clientpositive/union_remove_20.q.out
index 96d76d4..65b616f 100644
--- a/ql/src/test/results/clientpositive/union_remove_20.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_20.q.out
@@ -203,6 +203,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	40
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_21.q.out b/ql/src/test/results/clientpositive/union_remove_21.q.out
index 1356777..d0f5a3b 100644
--- a/ql/src/test/results/clientpositive/union_remove_21.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_21.q.out
@@ -187,6 +187,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	20
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_22.q.out b/ql/src/test/results/clientpositive/union_remove_22.q.out
index 933a0e8..937462f 100644
--- a/ql/src/test/results/clientpositive/union_remove_22.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_22.q.out
@@ -207,6 +207,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	60
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_23.q.out b/ql/src/test/results/clientpositive/union_remove_23.q.out
index b52ffc2..3c0faf8 100644
--- a/ql/src/test/results/clientpositive/union_remove_23.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_23.q.out
@@ -243,6 +243,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	40
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_24.q.out b/ql/src/test/results/clientpositive/union_remove_24.q.out
index 95bf66b..b6f0fa5 100644
--- a/ql/src/test/results/clientpositive/union_remove_24.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_24.q.out
@@ -199,6 +199,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	60
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_25.q.out b/ql/src/test/results/clientpositive/union_remove_25.q.out
index 54ddf56..509befc 100644
--- a/ql/src/test/results/clientpositive/union_remove_25.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_25.q.out
@@ -214,7 +214,10 @@ Database:           default
 Table:              outputtbl1
 #### A masked pattern was here ####
 Partition Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	40
 #### A masked pattern was here ####
@@ -417,7 +420,10 @@ Database:           default
 Table:              outputtbl2
 #### A masked pattern was here ####
 Partition Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	6812
 #### A masked pattern was here ####
@@ -604,7 +610,10 @@ Database:           default
 Table:              outputtbl3
 #### A masked pattern was here ####
 Partition Parameters:
+	COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	6812
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_3.q.out b/ql/src/test/results/clientpositive/union_remove_3.q.out
index a95a48e..fffacf0 100644
--- a/ql/src/test/results/clientpositive/union_remove_3.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_3.q.out
@@ -193,6 +193,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	1
+	numRows	0
+	rawDataSize	0
 	totalSize	72
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_4.q.out b/ql/src/test/results/clientpositive/union_remove_4.q.out
index 818ae80..d5d6986 100644
--- a/ql/src/test/results/clientpositive/union_remove_4.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_4.q.out
@@ -237,6 +237,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	40
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_5.q.out b/ql/src/test/results/clientpositive/union_remove_5.q.out
index d6c2b99..1825f98 100644
--- a/ql/src/test/results/clientpositive/union_remove_5.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_5.q.out
@@ -250,6 +250,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	3
+	numRows	0
+	rawDataSize	0
 	totalSize	68
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_7.q.out b/ql/src/test/results/clientpositive/union_remove_7.q.out
index 5541cb0..e46c365 100644
--- a/ql/src/test/results/clientpositive/union_remove_7.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_7.q.out
@@ -197,6 +197,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	178
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_8.q.out b/ql/src/test/results/clientpositive/union_remove_8.q.out
index 22e6f15..ea77561 100644
--- a/ql/src/test/results/clientpositive/union_remove_8.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_8.q.out
@@ -208,6 +208,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	3
+	numRows	0
+	rawDataSize	0
 	totalSize	271
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/union_remove_9.q.out b/ql/src/test/results/clientpositive/union_remove_9.q.out
index 70b5ee9..cad329d 100644
--- a/ql/src/test/results/clientpositive/union_remove_9.q.out
+++ b/ql/src/test/results/clientpositive/union_remove_9.q.out
@@ -255,6 +255,8 @@ Retention:          0
 Table Type:          MANAGED_TABLE
 Table Parameters:
 	numFiles	2
+	numRows	0
+	rawDataSize	0
 	totalSize	192
 #### A masked pattern was here ####
diff --git a/ql/src/test/results/clientpositive/unset_table_view_property.q.out b/ql/src/test/results/clientpositive/unset_table_view_property.q.out
index 8bbb9fe..a3dec73 100644
--- a/ql/src/test/results/clientpositive/unset_table_view_property.q.out
+++ b/ql/src/test/results/clientpositive/unset_table_view_property.q.out
@@ -16,6 +16,11 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable
 PREHOOK: type: SHOW_TBLPROPERTIES
 POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
+COLUMN_STATS_ACCURATE	{"BASIC_STATS":"true"}
+numFiles	0
+numRows	0
+rawDataSize	0
+totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET TABLE PROPERTIES
 ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3')
@@ -35,6 +40,8 @@ a	1
 c	3
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET all the properties
@@ -53,6 +60,8 @@ POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4')
@@ -72,6 +81,8 @@ c	3
 d	4
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET a subset of the properties
@@ -91,6 +102,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 c	3
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- the same property being UNSET multiple times
@@ -109,6 +122,8 @@ POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable
 POSTHOOK: type: SHOW_TBLPROPERTIES
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: ALTER TABLE vt.testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4')
@@ -129,6 +144,8 @@ c	3
 d	4
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
@@ -149,6 +166,8 @@ a	1
 c	3
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS
@@ -168,6 +187,8 @@ POSTHOOK: type: SHOW_TBLPROPERTIES
 a	1
 #### A masked pattern was here ####
 numFiles	0
+numRows	0
+rawDataSize	0
 totalSize	0
 #### A masked pattern was here ####
 PREHOOK: query: DROP TABLE vt.testTable
diff --git a/ql/src/test/results/clientpositive/updateBasicStats.q.out b/ql/src/test/results/clientpositive/updateBasicStats.q.out
index 596de00..4f3d605 100644
--- a/ql/src/test/results/clientpositive/updateBasicStats.q.out
+++ b/ql/src/test/results/clientpositive/updateBasicStats.q.out
@@ -24,11 +24,11 @@ STAGE PLANS:
      Processor Tree:
        TableScan
          alias: s
-          Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: string), value (type: string)
            outputColumnNames: _col0, _col1
-            Statistics: Num rows: 10 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 114 Basic stats: COMPLETE Column stats: NONE
            ListSink
 
 PREHOOK: query: alter table s update statistics set('numRows'='12')
@@ -53,11 +53,11 @@ STAGE PLANS:
      Processor Tree:
        TableScan
          alias: s
-          Statistics: Num rows: 12 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 12 Data size: 114 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: string), value (type: string)
            outputColumnNames: _col0, _col1
-            Statistics: Num rows: 12 Data size: 104 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 12 Data size: 114 Basic stats: COMPLETE Column stats: NONE
            ListSink
 
 PREHOOK: query: analyze table s compute statistics
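The SHOW TBLPROPERTIES output above now lists COLUMN_STATS_ACCURATE with a JSON value, {"BASIC_STATS":"true"}, alongside zeroed numRows and rawDataSize for a freshly created table, and the updateBasicStats plans show that a manually set numRows (12) is kept while the data size is re-estimated. A consumer that wants to trust numRows only when the flag says so could check the property along these lines; this is an illustrative sketch assuming org.json on the classpath, not the metastore's actual code path:

import java.util.Map;
import org.json.JSONObject; // assumption: org.json is available

public final class BasicStatsFlag {
  private BasicStatsFlag() {}

  // Returns true only when COLUMN_STATS_ACCURATE carries
  // {"BASIC_STATS":"true"}; anything else (absent property, legacy
  // TRUE/FALSE strings, malformed JSON) is treated as not up to date.
  public static boolean basicStatsUpToDate(Map<String, String> params) {
    String acc = params.get("COLUMN_STATS_ACCURATE");
    if (acc == null) {
      return false;
    }
    try {
      return "true".equals(new JSONObject(acc).optString("BASIC_STATS"));
    } catch (Exception e) {
      return false; // old-format or unparsable value
    }
  }
}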
diff --git a/ql/src/test/results/clientpositive/vector_between_columns.q.out b/ql/src/test/results/clientpositive/vector_between_columns.q.out
index a4e8d64..414cd27 100644
--- a/ql/src/test/results/clientpositive/vector_between_columns.q.out
+++ b/ql/src/test/results/clientpositive/vector_between_columns.q.out
@@ -92,11 +92,11 @@ STAGE PLANS:
        $hdt$_0:tint 
          TableScan
            alias: tint
-           Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 33 Data size: 269 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: rnum (type: int), cint (type: int)
              outputColumnNames: _col0, _col1
-             Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 33 Data size: 269 Basic stats: COMPLETE Column stats: NONE
             HashTable Sink Operator
               keys:
                 0 
                 1 
@@ -107,11 +107,11 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: tsint
-           Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 33 Data size: 271 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: rnum (type: int), csint (type: smallint)
              outputColumnNames: _col0, _col1
-             Statistics: Num rows: 5 Data size: 36 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 33 Data size: 271 Basic stats: COMPLETE Column stats: NONE
             Map Join Operator
               condition map:
                    Inner Join 0 to 1
@@ -119,17 +119,17 @@ STAGE PLANS:
                 0 
                 1 
               outputColumnNames: _col0, _col1, _col2, _col3
-              Statistics: Num rows: 5 Data size: 39 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 36 Data size: 295 Basic stats: COMPLETE Column stats: NONE
              Filter Operator
                predicate: _col1 BETWEEN _col3 AND _col3 (type: boolean)
-               Statistics: Num rows: 2 Data size: 15 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 18 Data size: 147 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: int), _col2 (type: int)
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 2 Data size: 15 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 18 Data size: 147 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
-                    Statistics: Num rows: 2 Data size: 15 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 18 Data size: 147 Basic stats: COMPLETE Column stats: NONE
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vector_between_in.q.out b/ql/src/test/results/clientpositive/vector_between_in.q.out
index 9f351b2..53b2e79 100644
--- a/ql/src/test/results/clientpositive/vector_between_in.q.out
+++ b/ql/src/test/results/clientpositive/vector_between_in.q.out
@@ -26,27 +26,27 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (cdate) IN (1969-10-26, 1969-07-14) (type: boolean)
-             Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: cdate (type: date)
                outputColumnNames: _col0
-                Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: date)
                  sort order: +
-                  Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
      Execution mode: vectorized
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: date)
          outputColumnNames: _col0
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -72,12 +72,12 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (not (cdate) IN (1969-10-26, 1969-07-14, 1970-01-21)) (type: boolean)
-             Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
              Select Operator
-               Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  aggregations: count()
                  mode: hash
@@ -122,27 +122,27 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (cdecimal1) IN (2365.8945945946, 881.0135135135, -3367.6517567568) (type: boolean)
-             Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: cdecimal1 (type: decimal(20,10))
                outputColumnNames: _col0
-                Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: decimal(20,10))
                  sort order: +
-                  Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
      Execution mode: vectorized
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: decimal(20,10))
          outputColumnNames: _col0
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -168,12 +168,12 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (not (cdecimal1) IN (2365.8945945946, 881.0135135135, -3367.6517567568)) (type: boolean)
-             Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
              Select Operator
-               Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  aggregations: count()
                  mode: hash
@@ -218,27 +218,27 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: cdate BETWEEN 1969-12-30 AND 1970-01-02 (type: boolean)
-             Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: cdate (type: date)
                outputColumnNames: _col0
-                Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: date)
                  sort order: +
-                  Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
      Execution mode: vectorized
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: date)
          outputColumnNames: _col0
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -264,27 +264,27 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: cdate NOT BETWEEN 1968-05-01 AND 1971-09-01 (type: boolean)
-             Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: cdate (type: date)
                outputColumnNames: _col0
-                Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: date)
                  sort order: +
-                  Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
      Execution mode: vectorized
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: date)
          outputColumnNames: _col0
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -310,27 +310,27 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: cdecimal1 BETWEEN -20 AND 45.9918918919 (type: boolean)
-             Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: cdecimal1 (type: decimal(20,10))
                outputColumnNames: _col0
-                Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: decimal(20,10))
                  sort order: +
-                  Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
      Execution mode: vectorized
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: decimal(20,10))
          outputColumnNames: _col0
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -356,12 +356,12 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 (type: boolean)
-             Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
              Select Operator
-               Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  aggregations: count()
                  mode: hash
@@ -655,22 +655,22 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: (cdate) IN (1969-10-26, 1969-07-14) (type: boolean)
              outputColumnNames: _col0
-             Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                keys: _col0 (type: boolean)
                mode: hash
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: boolean)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: boolean)
-                Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint)
      Execution mode: vectorized
      Reduce Operator Tree:
@@ -679,10 +679,10 @@ STAGE PLANS:
          keys: KEY._col0 (type: boolean)
          mode: mergepartial
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -708,22 +708,22 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: (cdecimal1) IN (2365.8945945946, 881.0135135135, -3367.6517567568) (type: boolean)
              outputColumnNames: _col0
-             Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                keys: _col0 (type: boolean)
                mode: hash
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: boolean)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: boolean)
-                Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint)
      Execution mode: vectorized
      Reduce Operator Tree:
@@ -732,10 +732,10 @@ STAGE PLANS:
          keys: KEY._col0 (type: boolean)
          mode: mergepartial
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -761,22 +761,22 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: cdate BETWEEN 1969-12-30 AND 1970-01-02 (type: boolean)
              outputColumnNames: _col0
-             Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                keys: _col0 (type: boolean)
                mode: hash
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: boolean)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: boolean)
-                Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1924 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint)
      Execution mode: vectorized
      Reduce Operator Tree:
@@ -785,10 +785,10 @@ STAGE PLANS:
          keys: KEY._col0 (type: boolean)
          mode: mergepartial
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 962 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -814,22 +814,22 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_date_test
-           Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+           Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: cdecimal1 NOT BETWEEN -2000 AND 4390.1351351351 (type: boolean)
              outputColumnNames: _col0
-             Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+             Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(1)
                keys: _col0 (type: boolean)
                mode: hash
                outputColumnNames: _col0, _col1
-               Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+               Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: boolean)
                 sort order: +
                 Map-reduce partition columns: _col0 (type: boolean)
-                Statistics: Num rows: 12288 Data size: 2467616 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 962 Data size: 107747 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col1 (type: bigint)
      Execution mode: vectorized
      Reduce Operator Tree:
@@ -838,10 +838,10 @@ STAGE PLANS:
          keys: KEY._col0 (type: boolean)
          mode: mergepartial
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 6144 Data size: 1233808 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 481 Data size: 53873 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
a/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out b/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out index 000a501..095ccaa 100644 --- a/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out +++ b/ql/src/test/results/clientpositive/vector_char_mapjoin1.q.out @@ -146,14 +146,14 @@ STAGE PLANS: $hdt$_0:a TableScan alias: a - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: char(10)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: char(10)) @@ -164,14 +164,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: char(10)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -179,11 +179,11 @@ STAGE PLANS: 0 _col1 (type: char(10)) 1 _col1 (type: char(10)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: char(10)) Execution mode: vectorized Local Work: @@ -192,10 +192,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(10)), VALUE._col1 (type: int), VALUE._col2 (type: char(10)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -246,14 +246,14 @@ STAGE PLANS: $hdt$_0:a TableScan alias: a - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data 
size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: char(10)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: char(20)) @@ -264,14 +264,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 13 Data size: 324 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 13 Data size: 324 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: char(20)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 324 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 13 Data size: 324 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -279,11 +279,11 @@ STAGE PLANS: 0 _col1 (type: char(20)) 1 _col1 (type: char(20)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: char(20)) Execution mode: vectorized Local Work: @@ -292,10 +292,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(10)), VALUE._col1 (type: int), VALUE._col2 (type: char(20)) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -348,14 +348,14 @@ STAGE PLANS: $hdt$_1:b TableScan alias: b - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 UDFToString(_col1) (type: string) @@ -366,14 +366,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: c2 is not null (type: boolean) - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: c1 (type: int), c2 (type: char(10)) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 294 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 23 Data size: 322 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -381,11 +381,11 @@ STAGE PLANS: 0 UDFToString(_col1) (type: string) 1 _col1 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: char(10)), _col2 (type: int), _col3 (type: string) Execution mode: vectorized Local Work: @@ -394,10 +394,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: char(10)), VALUE._col1 (type: int), VALUE._col2 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 3 Data size: 323 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 25 Data size: 354 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out b/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out index 2ee396b..a550d36 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_10_0.q.out @@ -49,24 +49,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal - Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: dec (type: decimal(10,0)) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(10,0)) sort order: + - Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: decimal(10,0)) outputColumnNames: _col0 - Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2 Data size: 112 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 2 Data size: 231 Basic stats: COMPLETE Column stats: 
NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out index b022435..893f446 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out @@ -50,22 +50,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_vgby - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14)) outputColumnNames: cint, cdecimal1, cdecimal2 - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), count() keys: cint (type: int) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)), _col9 (type: bigint) Execution mode: vectorized Reduce Operator Tree: @@ -74,17 +74,17 @@ STAGE PLANS: keys: KEY._col0 (type: int) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9 - Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 280 Data size: 63891 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col9 > 1) (type: boolean) - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 (type: decimal(23,14)), _col8 (type: decimal(33,14)) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8 - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -148,22 +148,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_vgby - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cint (type: int), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14)) outputColumnNames: cint, cdecimal1, cdecimal2 - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: count(cdecimal1), max(cdecimal1), min(cdecimal1), sum(cdecimal1), avg(cdecimal1), stddev_pop(cdecimal1), stddev_samp(cdecimal1), count(cdecimal2), max(cdecimal2), min(cdecimal2), sum(cdecimal2), avg(cdecimal2), stddev_pop(cdecimal2), stddev_samp(cdecimal2), count() keys: cint (type: int) mode: hash outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 12288 Data size: 2165060 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 560 Data size: 127782 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: struct<count:bigint,sum:decimal(30,10),input:decimal(20,10)>), _col6 (type: struct<count:bigint,sum:double,variance:double>), _col7 (type: struct<count:bigint,sum:double,variance:double>), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: struct<count:bigint,sum:decimal(33,14),input:decimal(23,14)>), _col13 (type: struct<count:bigint,sum:double,variance:double>), _col14 (type: struct<count:bigint,sum:double,variance:double>), _col15 (type: bigint) Execution mode: vectorized Reduce Operator Tree: @@ -172,17 +172,17 @@ STAGE PLANS: keys: KEY._col0 (type: int) mode: mergepartial outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15 - Statistics: Num rows: 6144 Data size: 1082530 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 280 Data size: 63891 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (_col15 > 1) (type: boolean) - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), _col1 (type: bigint), _col2 (type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: decimal(30,10)), _col5 (type: decimal(24,14)), _col6 (type: double), _col7 (type: double), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 (type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: decimal(27,18)), _col13 (type: double), _col14 (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14 - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data size: 21220 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2048 Data size: 360843 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 93 Data 
size: 21220 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out index 9244efd..ee36c31 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_expressions.q.out @@ -33,31 +33,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_test - Statistics: Num rows: 12288 Data size: 2128368 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 444 Data size: 103076 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((cdecimal1 > 0) and (UDFToDouble(cdecimal1) < 12345.5678) and (cdecimal2 <> 0) and (cdecimal2 > 1000) and cdouble is not null) (type: boolean) - Statistics: Num rows: 455 Data size: 78809 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3714 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: (cdecimal1 + cdecimal2) (type: decimal(25,14)), (cdecimal1 - (2 * cdecimal2)) (type: decimal(26,14)), ((UDFToDouble(cdecimal1) + 2.34) / UDFToDouble(cdecimal2)) (type: double), (UDFToDouble(cdecimal1) * (UDFToDouble(cdecimal2) / 3.4)) (type: double), (cdecimal1 % 10) (type: decimal(12,10)), UDFToInteger(cdecimal1) (type: int), UDFToShort(cdecimal2) (type: smallint), UDFToByte(cdecimal2) (type: tinyint), UDFToLong(cdecimal1) (type: bigint), UDFToBoolean(cdecimal1) (type: boolean), UDFToDouble(cdecimal2) (type: double), UDFToFloat(cdecimal1) (type: float), UDFToString(cdecimal2) (type: string), CAST( cdecimal1 AS TIMESTAMP) (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 - Statistics: Num rows: 455 Data size: 78809 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3714 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: decimal(25,14)), _col1 (type: decimal(26,14)), _col2 (type: double), _col3 (type: double), _col4 (type: decimal(12,10)), _col5 (type: int), _col6 (type: smallint), _col7 (type: tinyint), _col8 (type: bigint), _col9 (type: boolean), _col10 (type: double), _col11 (type: float), _col12 (type: string), _col13 (type: timestamp) sort order: ++++++++++++++ - Statistics: Num rows: 455 Data size: 78809 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3714 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: decimal(25,14)), KEY.reducesinkkey1 (type: decimal(26,14)), KEY.reducesinkkey2 (type: double), KEY.reducesinkkey3 (type: double), KEY.reducesinkkey4 (type: decimal(12,10)), KEY.reducesinkkey5 (type: int), KEY.reducesinkkey6 (type: smallint), KEY.reducesinkkey7 (type: tinyint), KEY.reducesinkkey8 (type: bigint), KEY.reducesinkkey9 (type: boolean), KEY.reducesinkkey10 (type: double), KEY.reducesinkkey11 (type: float), KEY.reducesinkkey12 (type: string), KEY.reducesinkkey13 (type: timestamp) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13 - Statistics: Num rows: 455 Data size: 78809 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 16 Data size: 3714 Basic stats: 
COMPLETE Column stats: NONE Limit Number of rows: 10 - Statistics: Num rows: 10 Data size: 1730 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 2320 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 10 Data size: 1730 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 10 Data size: 2320 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out b/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out index 0b70d4c..ff425bb 100644 --- a/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out +++ b/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out @@ -100,17 +100,17 @@ STAGE PLANS: Map Operator Tree: TableScan alias: decimal_test - Statistics: Num rows: 12288 Data size: 2201752 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1064 Data size: 127784 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (((cbigint % 500) = 0) and (sin(cdecimal1) >= -1.0)) (type: boolean) - Statistics: Num rows: 2048 Data size: 366958 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 177 Data size: 21257 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: cdecimal1 (type: decimal(20,10)), round(cdecimal1, 2) (type: decimal(13,2)), round(cdecimal1) (type: decimal(11,0)), floor(cdecimal1) (type: decimal(11,0)), ceil(cdecimal1) (type: decimal(11,0)), round(exp(cdecimal1), 58) (type: double), ln(cdecimal1) (type: double), log10(cdecimal1) (type: double), log2(cdecimal1) (type: double), log2((UDFToDouble(cdecimal1) - 15601.0)) (type: double), log(2.0, cdecimal1) (type: double), power(log2(cdecimal1), 2.0) (type: double), power(log2(cdecimal1), 2.0) (type: double), sqrt(cdecimal1) (type: double), abs(cdecimal1) (type: decimal(20,10)), sin(cdecimal1) (type: double), asin(cdecimal1) (type: double), cos(cdecimal1) (type: double), acos(cdecimal1) (type: double), atan(cdecimal1) (type: double), degrees(cdecimal1) (type: double), radians(cdecimal1) (type: double), cdecimal1 (type: decimal(20,10)), (- cdecimal1) (type: decimal(20,10)), sign(cdecimal1) (type: int), cos(((- sin(log(cdecimal1))) + 3.14159)) (type: double) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, _col25 - Statistics: Num rows: 2048 Data size: 366958 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 177 Data size: 21257 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 2048 Data size: 366958 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 177 Data size: 21257 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_grouping_sets.q.out b/ql/src/test/results/clientpositive/vector_grouping_sets.q.out index 4207c19..de52b49 100644 --- a/ql/src/test/results/clientpositive/vector_grouping_sets.q.out +++ b/ql/src/test/results/clientpositive/vector_grouping_sets.q.out @@ -147,31 +147,31 @@ STAGE PLANS: Map Operator Tree: TableScan alias: store 
- Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: s_store_id (type: string) outputColumnNames: s_store_id - Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: s_store_id (type: string), '0' (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 24 Data size: 51264 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 88 Data size: 8976 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 24 Data size: 51264 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 88 Data size: 8976 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE pruneGroupingSetId: true File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -222,34 +222,34 @@ STAGE PLANS: Map Operator Tree: TableScan alias: store - Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: s_store_id (type: string) outputColumnNames: s_store_id - Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: s_store_id (type: string), '0' (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 24 Data size: 51264 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 88 Data size: 8976 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string), _col1 (type: string) sort order: ++ Map-reduce partition columns: _col0 (type: string), _col1 (type: string) - Statistics: Num rows: 24 Data size: 51264 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 88 Data size: 8976 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Group By Operator keys: KEY._col0 (type: string), KEY._col1 (type: string) mode: mergepartial outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 25632 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 44 Data size: 4488 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out b/ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out index 2223e81..04358a6 100644 --- a/ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out +++ b/ql/src/test/results/clientpositive/vector_interval_mapjoin.q.out @@ -196,14 +196,14 @@ STAGE PLANS: $hdt$_1:vectortab_b_1korc TableScan alias: vectortab_b_1korc - Statistics: Num rows: 1000 Data size: 458448 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 257 Data size: 50442 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (s is not null and (dt - CAST( ts AS DATE)) is not null) (type: boolean) - Statistics: Num rows: 1000 Data size: 458448 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 257 Data size: 50442 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: s (type: string), (dt - CAST( ts AS DATE)) (type: interval_day_time) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1000 Data size: 458448 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 257 Data size: 50442 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: interval_day_time), _col0 (type: string) @@ -214,14 +214,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: vectortab_a_1korc - Statistics: Num rows: 1000 Data size: 460264 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 258 Data size: 50720 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (s is not null and (dt - CAST( ts AS DATE)) is not null) (type: boolean) - Statistics: Num rows: 1000 Data size: 460264 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 258 Data size: 50720 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: s (type: string), (dt - CAST( ts AS DATE)) (type: interval_day_time) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1000 Data size: 460264 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 258 Data size: 50720 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -229,14 +229,14 @@ STAGE PLANS: 0 _col1 (type: interval_day_time), _col0 (type: string) 1 _col1 (type: interval_day_time), _col0 (type: string) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 1100 Data size: 506290 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 283 Data size: 55792 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: string), _col2 (type: string), _col1 (type: interval_day_time) outputColumnNames: _col0, _col1, _col2 - Statistics: Num rows: 1100 Data size: 506290 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 283 Data size: 55792 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1100 Data size: 506290 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 283 Data size: 55792 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git a/ql/src/test/results/clientpositive/vector_join30.q.out b/ql/src/test/results/clientpositive/vector_join30.q.out index 
45ed894..e7d5dac 100644 --- a/ql/src/test/results/clientpositive/vector_join30.q.out +++ b/ql/src/test/results/clientpositive/vector_join30.q.out @@ -48,24 +48,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -101,7 +101,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -163,7 +163,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -186,13 +186,13 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE TableScan Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -202,7 +202,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -220,25 +220,25 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 500 Data size: 88000 Basic 
stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -303,21 +303,21 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -353,7 +353,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -398,13 +398,13 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE TableScan Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -414,7 +414,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -432,22 
+432,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -512,21 +512,21 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -562,7 +562,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 3302 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -607,13 +607,13 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE TableScan Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) Reduce Operator Tree: Join Operator @@ -623,7 +623,7 @@ STAGE PLANS: 0 _col0 (type: string) 1 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 550 Data size: 96800 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 33 Data size: 3302 Basic 
stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -641,22 +641,22 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -732,24 +732,24 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -798,7 +798,7 @@ STAGE PLANS: 1 _col0 (type: string) 2 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -873,7 +873,7 @@ STAGE PLANS: 1 _col0 (type: string) 2 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -926,7 +926,7 @@ STAGE PLANS: 1 _col0 (type: string) 2 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 66 Data size: 6604 
Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -949,20 +949,20 @@ STAGE PLANS: key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE TableScan Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE value expressions: _col1 (type: string) TableScan Reduce Output Operator key expressions: _col0 (type: string) sort order: + Map-reduce partition columns: _col0 (type: string) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -973,7 +973,7 @@ STAGE PLANS: 1 _col0 (type: string) 2 _col0 (type: string) outputColumnNames: _col2, _col3 - Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(hash(_col2,_col3)) mode: hash @@ -991,25 +991,25 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col1 (type: string) sort order: + - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE value expressions: _col0 (type: string) Execution mode: vectorized Reduce Operator Tree: Select Operator expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -1022,25 +1022,25 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orcsrc - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: string), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: 
NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1120,21 +1120,21 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string)
               outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col0 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1183,7 +1183,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -1258,7 +1258,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -1281,20 +1281,20 @@ STAGE PLANS:
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col1 (type: string)
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -1305,7 +1305,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -1323,22 +1323,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1351,22 +1351,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1444,21 +1444,21 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string)
               outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: string)
                sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1507,7 +1507,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -1552,20 +1552,20 @@ STAGE PLANS:
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col1 (type: string)
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -1576,7 +1576,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -1594,22 +1594,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1622,22 +1622,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1715,21 +1715,21 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string)
               outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: string)
                sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1778,7 +1778,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -1823,20 +1823,20 @@ STAGE PLANS:
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               value expressions: _col1 (type: string)
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -1847,7 +1847,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -1865,22 +1865,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1893,22 +1893,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -1986,21 +1986,21 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string)
               outputColumnNames: _col0
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: string)
                sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -2049,7 +2049,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -2094,20 +2094,20 @@ STAGE PLANS:
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
              value expressions: _col1 (type: string)
           TableScan
             Reduce Output Operator
               key expressions: _col0 (type: string)
               sort order: +
               Map-reduce partition columns: _col0 (type: string)
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
       Reduce Operator Tree:
         Join Operator
           condition map:
@@ -2118,7 +2118,7 @@ STAGE PLANS:
             1 _col0 (type: string)
             2 _col0 (type: string)
           outputColumnNames: _col2, _col3
-          Statistics: Num rows: 1100 Data size: 193600 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 66 Data size: 6604 Basic stats: COMPLETE Column stats: NONE
           Group By Operator
             aggregations: sum(hash(_col2,_col3))
             mode: hash
@@ -2136,22 +2136,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string), KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
@@ -2164,22 +2164,22 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: orcsrc
-            Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
             Select Operator
               expressions: key (type: string), value (type: string)
               outputColumnNames: _col0, _col1
-              Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
               Reduce Output Operator
                 key expressions: _col1 (type: string)
                 sort order: +
-                Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
                 value expressions: _col0 (type: string)
       Execution mode: vectorized
       Reduce Operator Tree:
         Select Operator
           expressions: VALUE._col0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 500 Data size: 88000 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 15 Data size: 3002 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
             table:
diff --git a/ql/src/test/results/clientpositive/vector_leftsemi_mapjoin.q.out b/ql/src/test/results/clientpositive/vector_leftsemi_mapjoin.q.out
index 485e352..8e4c5d7 100644
--- a/ql/src/test/results/clientpositive/vector_leftsemi_mapjoin.q.out
+++ b/ql/src/test/results/clientpositive/vector_leftsemi_mapjoin.q.out
@@ -152,19 +152,19 @@ STAGE PLANS:
         b
           TableScan
             alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -175,10 +175,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -186,21 +186,21 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int), _col1 (type: string)
                  sort order: ++
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -248,19 +248,19 @@ STAGE PLANS:
         b
           TableScan
             alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -271,10 +271,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -282,21 +282,21 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int), _col1 (type: string)
                  sort order: ++
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -369,10 +369,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -380,21 +380,21 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int), _col1 (type: string)
                  sort order: ++
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -459,10 +459,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -470,25 +470,25 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col1 (type: int)
                outputColumnNames: _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col1 (type: string)
                  outputColumnNames: _col0
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: string)
                    sort order: +
-                    Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -541,19 +541,19 @@ STAGE PLANS:
         b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: ((value < 'val_10') and key is not null) (type: boolean)
-              Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int), value (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int), _col1 (type: string)
                  mode: hash
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -564,10 +564,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -575,21 +575,21 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int), _col1 (type: string)
                  sort order: ++
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -657,10 +657,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -668,25 +668,25 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col1 (type: string)
                  outputColumnNames: _col0
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: string)
                    sort order: +
-                    Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -731,19 +731,19 @@ STAGE PLANS:
         b:t2
          TableScan
            alias: t2
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
-              Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int), value (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int), _col1 (type: string)
                  mode: hash
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -754,10 +754,10 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
             Filter Operator
               predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -765,25 +765,25 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col1 (type: string)
                  outputColumnNames: _col0
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: string)
                    sort order: +
-                    Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: string)
           outputColumnNames: _col0
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -825,19 +825,19 @@ STAGE PLANS:
         b:t1
          TableScan
            alias: t1
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key > 2) (type: boolean)
-              Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -848,10 +848,10 @@ STAGE PLANS:
       Map Operator Tree:
          TableScan
            alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -859,21 +859,21 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int), _col1 (type: string)
                  sort order: ++
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -920,19 +920,19 @@ STAGE PLANS:
         b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -954,21 +954,21 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int)
           outputColumnNames: _col0
-          Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1029,19 +1029,19 @@ STAGE PLANS:
         b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (2 * key) is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -1052,10 +1052,10 @@ STAGE PLANS:
       Map Operator Tree:
          TableScan
            alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -1063,21 +1063,21 @@ STAGE PLANS:
                  0 key (type: int)
                  1 (2 * _col0) (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int), _col1 (type: string)
                  sort order: ++
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
           outputColumnNames: _col0, _col1
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1126,10 +1126,10 @@ STAGE PLANS:
         a
          TableScan
            alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -1162,10 +1162,10 @@ STAGE PLANS:
       Map Operator Tree:
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
@@ -1252,19 +1252,19 @@ STAGE PLANS:
         b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (key is not null and value is not null) (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int), value (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int), _col1 (type: string)
                  mode: hash
                  outputColumnNames: _col0, _col1
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int), value (type: string)
@@ -1359,19 +1359,19 @@ STAGE PLANS:
         b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -1380,19 +1380,19 @@ STAGE PLANS:
         c
          TableScan
            alias: c
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -1417,21 +1417,21 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 _col0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
         Select Operator
           expressions: KEY.reducesinkkey0 (type: int)
           outputColumnNames: _col0
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
           File Output Operator
             compressed: false
-            Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
             table:
                 input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                 output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1492,7 +1492,7 @@ STAGE PLANS:
         b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int)
@@ -1501,16 +1501,16 @@ STAGE PLANS:
         c
          TableScan
            alias: c
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: int)
                mode: hash
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                HashTable Sink Operator
                  keys:
                    0 key (type: int)
@@ -1532,21 +1532,21 @@ STAGE PLANS:
                  1 key (type: int)
                  2 _col0 (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
       Local Work:
         Map Reduce Local Work
       Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1611,12 +1611,12 @@ STAGE PLANS:
       Map Operator Tree:
          TableScan
            alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Reduce Output Operator
              key expressions: key (type: int)
              sort order: +
              Map-reduce partition columns: key (type: int)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: b
            Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
@@ -1627,21 +1627,21 @@ STAGE PLANS:
              Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: c
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: int)
                mode: hash
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
                  Map-reduce partition columns: _col0 (type: int)
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Join Operator
          condition map:
@@ -1652,7 +1652,7 @@ STAGE PLANS:
            1 key (type: int)
            2 _col0 (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            table:
@@ -1667,15 +1667,15 @@ STAGE PLANS:
            Reduce Output Operator
              key expressions: _col0 (type: int)
              sort order: +
-              Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1751,16 +1751,16 @@ STAGE PLANS:
        b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: int)
                mode: hash
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                HashTable Sink Operator
                  keys:
                    0 key (type: int)
@@ -1769,7 +1769,7 @@ STAGE PLANS:
        c
          TableScan
            alias: c
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int)
@@ -1791,21 +1791,21 @@ STAGE PLANS:
                  1 _col0 (type: int)
                  2 key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1890,16 +1890,16 @@ STAGE PLANS:
        b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: int)
                mode: hash
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                HashTable Sink Operator
                  keys:
                    0 key (type: int)
@@ -1911,7 +1911,7 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: c
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -1921,21 +1921,21 @@ STAGE PLANS:
                1 _col0 (type: int)
                2 key (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int)
                sort order: +
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2013,29 +2013,29 @@ STAGE PLANS:
              Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: key (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                keys: _col0 (type: int)
                mode: hash
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
                  Map-reduce partition columns: _col0 (type: int)
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          TableScan
            alias: c
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Reduce Output Operator
              key expressions: key (type: int)
              sort order: +
              Map-reduce partition columns: key (type: int)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Join Operator
          condition map:
@@ -2046,7 +2046,7 @@ STAGE PLANS:
            1 _col0 (type: int)
            2 key (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            table:
@@ -2061,15 +2061,15 @@ STAGE PLANS:
            Reduce Output Operator
              key expressions: _col0 (type: int)
              sort order: +
-              Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2158,19 +2158,19 @@ STAGE PLANS:
        b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -2178,7 +2178,7 @@ STAGE PLANS:
        c
          TableScan
            alias: c
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 _col1 (type: string)
@@ -2200,7 +2200,7 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                Map Join Operator
                  condition map:
                       Left Outer Join0 to 1
@@ -2208,21 +2208,21 @@ STAGE PLANS:
                    0 _col1 (type: string)
                    1 value (type: string)
                  outputColumnNames: _col0
-                  Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: int)
                    sort order: +
-                    Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2301,19 +2301,19 @@ STAGE PLANS:
        $hdt$_1:b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: value (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: string)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 _col1 (type: string)
@@ -2339,10 +2339,10 @@ STAGE PLANS:
                  0 _col1 (type: string)
                  1 _col0 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2386,19 +2386,19 @@ STAGE PLANS:
        b
          TableScan
            alias: b
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: key (type: int)
                outputColumnNames: _col0
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                Group By Operator
                  keys: _col0 (type: int)
                  mode: hash
                  outputColumnNames: _col0
-                  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
                  HashTable Sink Operator
                    keys:
                      0 key (type: int)
@@ -2409,10 +2409,10 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: a
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Semi Join 0 to 1
@@ -2420,21 +2420,21 @@ STAGE PLANS:
                  0 key (type: int)
                  1 _col0 (type: int)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int), _col1 (type: string)
                  sort order: ++
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
      Local Work:
        Map Reduce Local Work
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 95 Data size: 382
Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2482,19 +2482,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -2505,10 +2505,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -2516,21 +2516,21 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Local Work: Map Reduce Local Work Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -2603,10 +2603,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -2614,21 +2614,21 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - 
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int), _col1 (type: string)
   sort order: ++
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2693,10 +2693,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Semi Join 0 to 1
@@ -2704,25 +2704,25 @@ STAGE PLANS:
     0 key (type: int)
     1 _col1 (type: int)
   outputColumnNames: _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: _col1 (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: string)
   sort order: +
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2775,19 +2775,19 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: ((value < 'val_10') and key is not null) (type: boolean)
-  Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int), value (type: string)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int), _col1 (type: string)
   mode: hash
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -2798,10 +2798,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Semi Join 0 to 1
@@ -2809,21 +2809,21 @@ STAGE PLANS:
     0 key (type: int)
     1 _col0 (type: int)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int), _col1 (type: string)
   sort order: ++
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2891,10 +2891,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Semi Join 0 to 1
@@ -2902,25 +2902,25 @@ STAGE PLANS:
     0 key (type: int)
     1 _col0 (type: int)
   outputColumnNames: _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: _col1 (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: string)
   sort order: +
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -2965,19 +2965,19 @@ STAGE PLANS:
 b:t2
 TableScan
   alias: t2
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
-  Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int), value (type: string)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int), _col1 (type: string)
   mode: hash
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -2988,10 +2988,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Semi Join 0 to 1
@@ -2999,25 +2999,25 @@ STAGE PLANS:
     0 key (type: int)
     1 _col0 (type: int)
   outputColumnNames: _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: _col1 (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: string)
   sort order: +
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3059,19 +3059,19 @@ STAGE PLANS:
 b:t1
 TableScan
   alias: t1
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: (key > 2) (type: boolean)
-  Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -3082,10 +3082,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Semi Join 0 to 1
@@ -3093,21 +3093,21 @@ STAGE PLANS:
     0 key (type: int)
    1 _col0 (type: int)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int), _col1 (type: string)
   sort order: ++
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3154,19 +3154,19 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -3188,21 +3188,21 @@ STAGE PLANS:
     0 key (type: int)
     1 _col0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
-  Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3263,19 +3263,19 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: (2 * key) is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -3286,10 +3286,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Semi Join 0 to 1
@@ -3297,21 +3297,21 @@ STAGE PLANS:
     0 key (type: int)
     1 (2 * _col0) (type: int)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int), _col1 (type: string)
   sort order: ++
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3360,10 +3360,10 @@ STAGE PLANS:
 a
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -3396,10 +3396,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Inner Join 0 to 1
@@ -3486,19 +3486,19 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: (key is not null and value is not null) (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int), value (type: string)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int), _col1 (type: string)
   mode: hash
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int), value (type: string)
@@ -3593,19 +3593,19 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -3614,19 +3614,19 @@ STAGE PLANS:
 c
 TableScan
   alias: c
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -3651,21 +3651,21 @@ STAGE PLANS:
     1 _col0 (type: int)
     2 _col0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3726,7 +3726,7 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -3735,16 +3735,16 @@ STAGE PLANS:
 c
 TableScan
   alias: c
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -3766,21 +3766,21 @@ STAGE PLANS:
     1 key (type: int)
     2 _col0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3845,12 +3845,12 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: key (type: int)
   sort order: +
   Map-reduce partition columns: key (type: int)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 TableScan
   alias: b
   Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
@@ -3861,21 +3861,21 @@ STAGE PLANS:
   Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
 TableScan
   alias: c
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
   Map-reduce partition columns: _col0 (type: int)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
   condition map:
@@ -3886,7 +3886,7 @@ STAGE PLANS:
     1 key (type: int)
     2 _col0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
   table:
@@ -3901,15 +3901,15 @@ STAGE PLANS:
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -3985,16 +3985,16 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -4003,7 +4003,7 @@ STAGE PLANS:
 c
 TableScan
   alias: c
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -4025,21 +4025,21 @@ STAGE PLANS:
     1 _col0 (type: int)
     2 key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4124,16 +4124,16 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -4145,7 +4145,7 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: c
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Semi Join 0 to 1
@@ -4155,21 +4155,21 @@ STAGE PLANS:
     1 _col0 (type: int)
     2 key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4247,29 +4247,29 @@ STAGE PLANS:
   Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
   Map-reduce partition columns: _col0 (type: int)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 TableScan
   alias: c
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: key (type: int)
   sort order: +
   Map-reduce partition columns: key (type: int)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Join Operator
   condition map:
@@ -4280,7 +4280,7 @@ STAGE PLANS:
     1 _col0 (type: int)
     2 key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
   table:
@@ -4295,15 +4295,15 @@ STAGE PLANS:
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4392,19 +4392,19 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -4412,7 +4412,7 @@ STAGE PLANS:
 c
 TableScan
   alias: c
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 _col1 (type: string)
@@ -4434,7 +4434,7 @@ STAGE PLANS:
     0 key (type: int)
     1 _col0 (type: int)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Outer Join0 to 1
@@ -4442,21 +4442,21 @@ STAGE PLANS:
     0 _col1 (type: string)
     1 value (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int)
   sort order: +
-  Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
 Local Work:
   Map Reduce Local Work
 Reduce Operator Tree:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4535,19 +4535,19 @@ STAGE PLANS:
 $hdt$_1:b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: value is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: value (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: string)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 _col1 (type: string)
@@ -4573,10 +4573,10 @@ STAGE PLANS:
     0 _col1 (type: string)
     1 _col0 (type: string)
   outputColumnNames: _col0
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
 File Output Operator
   compressed: false
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
   table:
     input format: org.apache.hadoop.mapred.SequenceFileInputFormat
     output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -4620,19 +4620,19 @@ STAGE PLANS:
 b
 TableScan
   alias: b
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: int)
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 Group By Operator
   keys: _col0 (type: int)
   mode: hash
   outputColumnNames: _col0
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
 HashTable Sink Operator
   keys:
     0 key (type: int)
@@ -4643,10 +4643,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: a
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Filter Operator
   predicate: key is not null (type: boolean)
-  Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
 Map Join Operator
   condition map:
     Left Semi Join 0 to 1
@@ -4654,11 +4654,11 @@ STAGE PLANS:
     0 key (type: int)
     1 _col0 (type: int)
   outputColumnNames: _col0, _col1
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: _col0 (type: int), _col1 (type: string)
   sort order: ++
-  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+  Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
 Execution mode: vectorized
 Local Work:
   Map Reduce Local Work
@@ -4666,10 +4666,10 @@ STAGE PLANS:
 Select Operator
   expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
   outputColumnNames: _col0, _col1
_col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -4717,19 +4717,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -4740,10 +4740,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -4751,11 +4751,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -4763,10 +4763,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -4839,10 +4839,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE 
Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -4850,11 +4850,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -4862,10 +4862,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -4930,10 +4930,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -4941,15 +4941,15 @@ STAGE PLANS: 0 key (type: int) 1 _col1 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -4957,10 +4957,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -5013,19 +5013,19 @@ STAGE 
PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((value < 'val_10') and key is not null) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -5036,10 +5036,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -5047,11 +5047,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -5059,10 +5059,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -5130,10 +5130,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -5141,15 +5141,15 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data 
              Select Operator
                expressions: _col1 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: string)
                  sort order: +
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -5157,10 +5157,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: string)
        outputColumnNames: _col0
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5205,19 +5205,19 @@ STAGE PLANS:
    b:t2 
      TableScan
        alias: t2
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
-          Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int), value (type: string)
            outputColumnNames: _col0, _col1
-            Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int), _col1 (type: string)
              mode: hash
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -5228,10 +5228,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -5239,15 +5239,15 @@ STAGE PLANS:
                0 key (type: int)
                1 _col0 (type: int)
              outputColumnNames: _col1
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: _col1 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: string)
                  sort order: +
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -5255,10 +5255,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: string)
        outputColumnNames: _col0
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5300,19 +5300,19 @@ STAGE PLANS:
    b:t1 
      TableScan
        alias: t1
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: (key > 2) (type: boolean)
-          Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -5323,10 +5323,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -5334,11 +5334,11 @@ STAGE PLANS:
                0 key (type: int)
                1 _col0 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int), _col1 (type: string)
                sort order: ++
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -5346,10 +5346,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
        outputColumnNames: _col0, _col1
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5396,19 +5396,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: key is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -5430,11 +5430,11 @@ STAGE PLANS:
                0 key (type: int)
                1 _col0 (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int)
                sort order: +
-                Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -5442,10 +5442,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5506,19 +5506,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: (2 * key) is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -5529,10 +5529,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -5540,11 +5540,11 @@ STAGE PLANS:
                0 key (type: int)
                1 (2 * _col0) (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int), _col1 (type: string)
                sort order: ++
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -5552,10 +5552,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
        outputColumnNames: _col0, _col1
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5604,10 +5604,10 @@ STAGE PLANS:
    a 
      TableScan
        alias: a
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: key is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          HashTable Sink Operator
            keys:
              0 key (type: int)
@@ -5640,10 +5640,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: b
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Inner Join 0 to 1
@@ -5731,19 +5731,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: (key is not null and value is not null) (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int), value (type: string)
            outputColumnNames: _col0, _col1
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int), _col1 (type: string)
              mode: hash
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int), value (type: string)
@@ -5839,19 +5839,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: key is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -5860,19 +5860,19 @@ STAGE PLANS:
    c 
      TableScan
        alias: c
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: key is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -5897,11 +5897,11 @@ STAGE PLANS:
                1 _col0 (type: int)
                2 _col0 (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int)
                sort order: +
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -5909,10 +5909,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -5973,7 +5973,7 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        HashTable Sink Operator
          keys:
            0 key (type: int)
@@ -5982,16 +5982,16 @@ STAGE PLANS:
    c 
      TableScan
        alias: c
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Select Operator
          expressions: key (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Group By Operator
            keys: _col0 (type: int)
            mode: hash
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int)
@@ -6013,11 +6013,11 @@ STAGE PLANS:
                1 key (type: int)
                2 _col0 (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int)
                sort order: +
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -6025,10 +6025,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -6093,12 +6093,12 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Reduce Output Operator
            key expressions: key (type: int)
            sort order: +
            Map-reduce partition columns: key (type: int)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        TableScan
          alias: b
          Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
@@ -6109,21 +6109,21 @@ STAGE PLANS:
            Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
        TableScan
          alias: c
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int)
                sort order: +
                Map-reduce partition columns: _col0 (type: int)
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
    Reduce Operator Tree:
      Join Operator
        condition map:
@@ -6134,7 +6134,7 @@ STAGE PLANS:
          1 key (type: int)
          2 _col0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
          table:
@@ -6149,15 +6149,15 @@ STAGE PLANS:
      Reduce Output Operator
        key expressions: _col0 (type: int)
        sort order: +
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
    Reduce Operator Tree:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -6233,16 +6233,16 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Select Operator
          expressions: key (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Group By Operator
            keys: _col0 (type: int)
            mode: hash
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int)
@@ -6251,7 +6251,7 @@ STAGE PLANS:
    c 
      TableScan
        alias: c
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        HashTable Sink Operator
          keys:
            0 key (type: int)
@@ -6273,11 +6273,11 @@ STAGE PLANS:
                1 _col0 (type: int)
                2 key (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int)
                sort order: +
-                Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -6285,10 +6285,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -6373,16 +6373,16 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Select Operator
          expressions: key (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Group By Operator
            keys: _col0 (type: int)
            mode: hash
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int)
@@ -6394,7 +6394,7 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: c
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Map Join Operator
            condition map:
                 Left Semi Join 0 to 1
@@ -6404,11 +6404,11 @@ STAGE PLANS:
              1 _col0 (type: int)
              2 key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
            Reduce Output Operator
              key expressions: _col0 (type: int)
              sort order: +
-              Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -6416,10 +6416,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -6497,29 +6497,29 @@ STAGE PLANS:
            Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE
        TableScan
          alias: b
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int)
                sort order: +
                Map-reduce partition columns: _col0 (type: int)
-                Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        TableScan
          alias: c
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Reduce Output Operator
            key expressions: key (type: int)
            sort order: +
            Map-reduce partition columns: key (type: int)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
    Reduce Operator Tree:
      Join Operator
        condition map:
@@ -6530,7 +6530,7 @@ STAGE PLANS:
          1 _col0 (type: int)
          2 key (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
          table:
@@ -6545,15 +6545,15 @@ STAGE PLANS:
      Reduce Output Operator
        key expressions: _col0 (type: int)
        sort order: +
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
    Reduce Operator Tree:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -6642,19 +6642,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: key is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -6662,7 +6662,7 @@ STAGE PLANS:
    c 
      TableScan
        alias: c
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        HashTable Sink Operator
          keys:
            0 _col1 (type: string)
@@ -6684,7 +6684,7 @@ STAGE PLANS:
                0 key (type: int)
                1 _col0 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Outer Join0 to 1
@@ -6692,11 +6692,11 @@ STAGE PLANS:
                  0 _col1 (type: string)
                  1 value (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int)
                  sort order: +
-                  Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -6704,10 +6704,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -6786,19 +6786,19 @@ STAGE PLANS:
    $hdt$_1:b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: value is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: value (type: string)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: string)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 _col1 (type: string)
@@ -6824,10 +6824,10 @@ STAGE PLANS:
                0 _col1 (type: string)
                1 _col0 (type: string)
              outputColumnNames: _col0
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
              File Output Operator
                compressed: false
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                table:
                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -6872,19 +6872,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: key is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -6895,10 +6895,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -6906,11 +6906,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -6918,10 +6918,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -6969,19 +6969,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -6992,10 +6992,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7003,11 +7003,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -7015,10 
@@ -7015,10 +7015,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
        outputColumnNames: _col0, _col1
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -7091,10 +7091,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -7102,11 +7102,11 @@ STAGE PLANS:
                0 key (type: int)
                1 _col0 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int), _col1 (type: string)
                sort order: ++
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -7114,10 +7114,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
        outputColumnNames: _col0, _col1
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -7182,10 +7182,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -7193,15 +7193,15 @@ STAGE PLANS:
                0 key (type: int)
                1 _col1 (type: int)
              outputColumnNames: _col1
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: _col1 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: string)
                sort order: +
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -7209,10 +7209,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: string)
        outputColumnNames: _col0
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -7265,19 +7265,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: ((value < 'val_10') and key is not null) (type: boolean)
-          Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int), value (type: string)
            outputColumnNames: _col0, _col1
-            Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int), _col1 (type: string)
              mode: hash
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -7288,10 +7288,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -7299,11 +7299,11 @@ STAGE PLANS:
                0 key (type: int)
                1 _col0 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int), _col1 (type: string)
                sort order: ++
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -7311,10 +7311,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
        outputColumnNames: _col0, _col1
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -7382,10 +7382,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -7393,15 +7393,15 @@ STAGE PLANS:
                0 key (type: int)
                1 _col0 (type: int)
              outputColumnNames: _col1
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: _col1 (type: string)
                outputColumnNames: _col0
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: string)
                  sort order: +
-                  Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -7409,10 +7409,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: string)
        outputColumnNames: _col0
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -7457,19 +7457,19 @@ STAGE PLANS:
    b:t2 
      TableScan
        alias: t2
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: ((key > 5) and (value <= 'val_20')) (type: boolean)
-          Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int), value (type: string)
            outputColumnNames: _col0, _col1
-            Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int), _col1 (type: string)
              mode: hash
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -7480,10 +7480,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7491,15 +7491,15 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -7507,10 +7507,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -7552,19 +7552,19 @@ STAGE PLANS: b:t1 TableScan alias: t1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 2) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -7575,10 +7575,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -7586,11 +7586,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: 
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -7598,10 +7598,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
        outputColumnNames: _col0, _col1
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -7648,19 +7648,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: key is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -7682,11 +7682,11 @@ STAGE PLANS:
                0 key (type: int)
                1 _col0 (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int)
                sort order: +
-                Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -7694,10 +7694,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int)
        outputColumnNames: _col0
-        Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -7758,19 +7758,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: (2 * key) is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Select Operator
            expressions: key (type: int)
            outputColumnNames: _col0
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Group By Operator
              keys: _col0 (type: int)
              mode: hash
              outputColumnNames: _col0
-              Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -7781,10 +7781,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: a
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Left Semi Join 0 to 1
@@ -7792,11 +7792,11 @@ STAGE PLANS:
                0 key (type: int)
                1 (2 * _col0) (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: int), _col1 (type: string)
                sort order: ++
-                Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
    Execution mode: vectorized
    Local Work:
      Map Reduce Local Work
@@ -7804,10 +7804,10 @@ STAGE PLANS:
      Select Operator
        expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string)
        outputColumnNames: _col0, _col1
-        Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
        File Output Operator
          compressed: false
-          Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE
          table:
              input format: org.apache.hadoop.mapred.SequenceFileInputFormat
              output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -7856,10 +7856,10 @@ STAGE PLANS:
    a 
      TableScan
        alias: a
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: key is not null (type: boolean)
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
          HashTable Sink Operator
            keys:
              0 key (type: int)
@@ -7892,10 +7892,10 @@ STAGE PLANS:
    Map Operator Tree:
        TableScan
          alias: b
-          Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
          Filter Operator
            predicate: key is not null (type: boolean)
-            Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Inner Join 0 to 1
@@ -7983,19 +7983,19 @@ STAGE PLANS:
    b 
      TableScan
        alias: b
-        Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE
+        Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE
        Filter Operator
          predicate: (key is not null and value is not null) (type: boolean)
is not null) (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int), value (type: string) @@ -8091,19 +8091,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -8112,19 +8112,19 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -8149,11 +8149,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -8161,10 +8161,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8225,7 +8225,7 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -8234,16 +8234,16 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -8265,11 +8265,11 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -8277,10 +8277,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8345,12 +8345,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan alias: b Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE @@ -8361,21 +8361,21 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 
348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -8386,7 +8386,7 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -8401,15 +8401,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8485,16 +8485,16 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -8503,7 +8503,7 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -8525,11 +8525,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 
191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -8537,10 +8537,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8625,16 +8625,16 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -8646,7 +8646,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -8656,11 +8656,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -8668,10 +8668,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8749,29 +8749,29 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column 
stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -8782,7 +8782,7 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -8797,15 +8797,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -8894,19 +8894,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -8914,7 +8914,7 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: string) @@ -8936,7 +8936,7 
@@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -8944,11 +8944,11 @@ STAGE PLANS: 0 _col1 (type: string) 1 value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -8956,10 +8956,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9038,19 +9038,19 @@ STAGE PLANS: $hdt$_1:b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: string) @@ -9076,10 +9076,10 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9124,19 +9124,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 
348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -9147,10 +9147,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9158,11 +9158,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9170,10 +9170,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9221,19 +9221,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -9244,10 +9244,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 
Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9255,11 +9255,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9267,10 +9267,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9343,10 +9343,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9354,11 +9354,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9366,10 +9366,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9434,10 +9434,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + 
Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9445,15 +9445,15 @@ STAGE PLANS: 0 key (type: int) 1 _col1 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9461,10 +9461,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9517,19 +9517,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((value < 'val_10') and key is not null) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -9540,10 +9540,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9551,11 +9551,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE 
Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9563,10 +9563,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9634,10 +9634,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9645,15 +9645,15 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9661,10 +9661,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9709,19 +9709,19 @@ STAGE PLANS: b:t2 TableScan alias: t2 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select 
Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -9732,10 +9732,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9743,15 +9743,15 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9759,10 +9759,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9804,19 +9804,19 @@ STAGE PLANS: b:t1 TableScan alias: t1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 2) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -9827,10 +9827,10 @@ STAGE PLANS: 
Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -9838,11 +9838,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9850,10 +9850,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -9900,19 +9900,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -9934,11 +9934,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -9946,10 +9946,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 
378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10010,19 +10010,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (2 * key) is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10033,10 +10033,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -10044,11 +10044,11 @@ STAGE PLANS: 0 key (type: int) 1 (2 * _col0) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -10056,10 +10056,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10108,10 +10108,10 @@ STAGE PLANS: a TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10144,10 +10144,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -10235,19 +10235,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int), value (type: string) @@ -10343,19 +10343,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10364,19 +10364,19 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10401,11 +10401,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -10413,10 +10413,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10477,7 +10477,7 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10486,16 +10486,16 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10517,11 +10517,11 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -10529,10 +10529,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output 
format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10597,12 +10597,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan alias: b Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE @@ -10613,21 +10613,21 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -10638,7 +10638,7 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -10653,15 +10653,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10737,16 +10737,16 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: 
hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10755,7 +10755,7 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10777,11 +10777,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -10789,10 +10789,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -10877,16 +10877,16 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -10898,7 +10898,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -10908,11 +10908,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -10920,10 +10920,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column 
stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11001,29 +11001,29 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -11034,7 +11034,7 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -11049,15 +11049,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11146,19 +11146,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - 
Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -11166,7 +11166,7 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: string) @@ -11188,7 +11188,7 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -11196,11 +11196,11 @@ STAGE PLANS: 0 _col1 (type: string) 1 value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -11208,10 +11208,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11290,19 +11290,19 @@ STAGE PLANS: $hdt$_1:b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: string) @@ -11328,10 +11328,10 @@ 
STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11376,19 +11376,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -11399,10 +11399,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -11410,11 +11410,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -11422,10 +11422,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11473,19 +11473,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 
86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -11496,10 +11496,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -11507,11 +11507,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -11519,10 +11519,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11595,10 +11595,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -11606,11 +11606,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 
Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -11618,10 +11618,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11686,10 +11686,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -11697,15 +11697,15 @@ STAGE PLANS: 0 key (type: int) 1 _col1 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -11713,10 +11713,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11769,19 +11769,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((value < 'val_10') and key is not null) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: 
COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -11792,10 +11792,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -11803,11 +11803,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -11815,10 +11815,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11886,10 +11886,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -11897,15 +11897,15 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -11913,10 +11913,10 @@ STAGE PLANS: Select Operator expressions: 
KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -11961,19 +11961,19 @@ STAGE PLANS: b:t2 TableScan alias: t2 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((key > 5) and (value <= 'val_20')) (type: boolean) - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 1 Data size: 93 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -11984,10 +11984,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -11995,15 +11995,15 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col1 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: string) sort order: + - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -12011,10 +12011,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12056,19 +12056,19 @@ STAGE PLANS: b:t1 TableScan alias: t1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key > 2) (type: boolean) - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 3 Data size: 279 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 28 Data size: 112 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -12079,10 +12079,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -12090,11 +12090,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -12102,10 +12102,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 30 Data size: 123 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12152,19 +12152,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: 
_col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -12186,11 +12186,11 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -12198,10 +12198,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 94 Data size: 378 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12262,19 +12262,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (2 * key) is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -12285,10 +12285,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -12296,11 +12296,11 @@ STAGE PLANS: 0 key (type: int) 1 (2 * _col0) (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int), _col1 (type: string) sort order: ++ - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Execution mode: 
vectorized Local Work: Map Reduce Local Work @@ -12308,10 +12308,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12360,10 +12360,10 @@ STAGE PLANS: a TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -12396,10 +12396,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -12487,19 +12487,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (key is not null and value is not null) (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int), value (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int), _col1 (type: string) mode: hash outputColumnNames: _col0, _col1 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int), value (type: string) @@ -12595,19 +12595,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: 
int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -12616,19 +12616,19 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -12653,11 +12653,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -12665,10 +12665,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12729,7 +12729,7 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -12738,16 +12738,16 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -12769,11 +12769,11 @@ STAGE PLANS: 1 key (type: int) 2 _col0 
(type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -12781,10 +12781,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12849,12 +12849,12 @@ STAGE PLANS: Map Operator Tree: TableScan alias: a - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan alias: b Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE @@ -12865,21 +12865,21 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -12890,7 +12890,7 @@ STAGE PLANS: 1 key (type: int) 2 _col0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -12905,15 +12905,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: 
KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -12989,16 +12989,16 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -13007,7 +13007,7 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -13029,11 +13029,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -13041,10 +13041,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13129,16 +13129,16 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator 
keys: 0 key (type: int) @@ -13150,7 +13150,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Semi Join 0 to 1 @@ -13160,11 +13160,11 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -13172,10 +13172,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13253,29 +13253,29 @@ STAGE PLANS: Statistics: Num rows: 22 Data size: 2046 Basic stats: COMPLETE Column stats: NONE TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + Map-reduce partition columns: _col0 (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 86 Data size: 344 Basic stats: COMPLETE Column stats: NONE TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: key (type: int) sort order: + Map-reduce partition columns: key (type: int) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Join Operator condition map: @@ -13286,7 +13286,7 @@ STAGE PLANS: 1 _col0 (type: int) 2 key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false table: @@ -13301,15 +13301,15 @@ STAGE PLANS: Reduce Output Operator key expressions: _col0 (type: int) sort order: + 
- Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE Reduce Operator Tree: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 48 Data size: 4501 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 191 Data size: 765 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13398,19 +13398,19 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: key (type: int) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: int) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 87 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -13418,7 +13418,7 @@ STAGE PLANS: c TableScan alias: c - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 344 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: string) @@ -13440,7 +13440,7 @@ STAGE PLANS: 0 key (type: int) 1 _col0 (type: int) outputColumnNames: _col0, _col1 - Statistics: Num rows: 24 Data size: 2250 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 95 Data size: 382 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Left Outer Join0 to 1 @@ -13448,11 +13448,11 @@ STAGE PLANS: 0 _col1 (type: string) 1 value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: int) sort order: + - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE Execution mode: vectorized Local Work: Map Reduce Local Work @@ -13460,10 +13460,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: int) outputColumnNames: _col0 - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 26 Data size: 2475 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 104 Data size: 420 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -13542,19 +13542,19 @@ STAGE PLANS: $hdt$_1:b TableScan alias: b - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: value (type: string) outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE Group By Operator keys: _col0 (type: string) mode: hash outputColumnNames: _col0 - Statistics: Num rows: 11 Data size: 1023 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 3 Data size: 348 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 _col1 (type: string) @@ -13580,10 +13580,10 @@ STAGE PLANS: 0 _col1 (type: string) 1 _col0 (type: string) outputColumnNames: _col0 - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 12 Data size: 1125 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 7 Data size: 716 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vector_nullsafe_join.q.out b/ql/src/test/results/clientpositive/vector_nullsafe_join.q.out
index 2090c24..8cc0eea 100644
--- a/ql/src/test/results/clientpositive/vector_nullsafe_join.q.out
+++ b/ql/src/test/results/clientpositive/vector_nullsafe_join.q.out
@@ -70,7 +70,7 @@ STAGE PLANS: a TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -81,7 +81,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -90,14 +90,14 @@ STAGE PLANS: 1 value (type: int) nullSafes: [true] outputColumnNames: _col0, _col1, _col5, _col6 - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -154,10 +154,10 @@ STAGE PLANS: a TableScan alias: a - Statistics: Num
rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -166,10 +166,10 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: value is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -181,10 +181,10 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: key is not null (type: boolean) - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -194,14 +194,14 @@ STAGE PLANS: 1 value (type: int) 2 key (type: int) outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int) outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -249,7 +249,7 @@ STAGE PLANS: a TableScan alias: a - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -258,7 +258,7 @@ STAGE PLANS: b TableScan alias: b - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE HashTable Sink Operator keys: 0 key (type: int) @@ -270,7 +270,7 @@ STAGE PLANS: Map Operator Tree: TableScan alias: c - Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE Map Join Operator condition map: Inner Join 0 to 1 @@ -281,14 +281,14 @@ STAGE PLANS: 2 key (type: int) nullSafes: [true] outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11 - Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE 
              Select Operator
                expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -363,10 +363,10 @@ STAGE PLANS:
         a
           TableScan
             alias: a
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int), value (type: int)
@@ -375,10 +375,10 @@
         b
           TableScan
             alias: b
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int), value (type: int)
@@ -390,10 +390,10 @@
       Map Operator Tree:
           TableScan
             alias: c
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
@@ -404,14 +404,14 @@
                  2 key (type: int), value (type: int)
                nullSafes: [true, false]
                outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int)
                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
-                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -459,7 +459,7 @@ STAGE PLANS:
         a
           TableScan
             alias: a
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int), value (type: int)
@@ -468,7 +468,7 @@
         b
           TableScan
             alias: b
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int), value (type: int)
@@ -480,7 +480,7 @@
       Map Operator Tree:
           TableScan
             alias: c
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Inner Join 0 to 1
@@ -491,14 +491,14 @@
                2 key (type: int), value (type: int)
              nullSafes: [true, true]
              outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
-              Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -650,7 +650,7 @@ STAGE PLANS:
         a
           TableScan
             alias: a
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int)
@@ -661,7 +661,7 @@
       Map Operator Tree:
           TableScan
             alias: b
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Inner Join 0 to 1
@@ -670,14 +670,14 @@
                1 value (type: int)
              nullSafes: [true]
              outputColumnNames: _col0, _col1, _col5, _col6
-              Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col3
-                Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 6 Data size: 26 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 37 Data size: 305 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -734,10 +734,10 @@ STAGE PLANS:
         a
           TableScan
             alias: a
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -746,10 +746,10 @@
         b
           TableScan
             alias: b
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int)
@@ -761,10 +761,10 @@
       Map Operator Tree:
           TableScan
             alias: c
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
@@ -774,14 +774,14 @@
                  1 value (type: int)
                  2 key (type: int)
                outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int)
                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
-                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -829,7 +829,7 @@ STAGE PLANS:
         a
           TableScan
             alias: a
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int)
@@ -838,7 +838,7 @@
         b
           TableScan
             alias: b
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int)
@@ -850,7 +850,7 @@
       Map Operator Tree:
           TableScan
             alias: c
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Inner Join 0 to 1
@@ -861,14 +861,14 @@
                2 key (type: int)
              nullSafes: [true]
              outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
-              Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -943,10 +943,10 @@ STAGE PLANS:
         a
           TableScan
             alias: a
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int), value (type: int)
@@ -955,10 +955,10 @@
         b
           TableScan
             alias: b
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: key is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 key (type: int), value (type: int)
@@ -970,10 +970,10 @@
       Map Operator Tree:
           TableScan
             alias: c
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: value is not null (type: boolean)
-              Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Inner Join 0 to 1
@@ -984,14 +984,14 @@
                  2 key (type: int), value (type: int)
                nullSafes: [true, false]
                outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int)
                  outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
-                    Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -1039,7 +1039,7 @@ STAGE PLANS:
         a
           TableScan
             alias: a
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int), value (type: int)
@@ -1048,7 +1048,7 @@
         b
           TableScan
             alias: b
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            HashTable Sink Operator
              keys:
                0 key (type: int), value (type: int)
@@ -1060,7 +1060,7 @@
       Map Operator Tree:
          TableScan
            alias: c
-            Statistics: Num rows: 6 Data size: 24 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 34 Data size: 278 Basic stats: COMPLETE Column stats: NONE
            Map Join Operator
              condition map:
                   Inner Join 0 to 1
@@ -1071,14 +1071,14 @@
                2 key (type: int), value (type: int)
              nullSafes: [true, true]
              outputColumnNames: _col0, _col1, _col5, _col6, _col10, _col11
-              Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: _col0 (type: int), _col1 (type: int), _col5 (type: int), _col6 (type: int), _col10 (type: int), _col11 (type: int)
                outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-                Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                File Output Operator
                  compressed: false
-                  Statistics: Num rows: 13 Data size: 52 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 74 Data size: 611 Basic stats: COMPLETE Column stats: NONE
                  table:
                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vector_number_compare_projection.q.out b/ql/src/test/results/clientpositive/vector_number_compare_projection.q.out
index 85f7cc8..c446af5 100644
--- a/ql/src/test/results/clientpositive/vector_number_compare_projection.q.out
+++ b/ql/src/test/results/clientpositive/vector_number_compare_projection.q.out
@@ -127,22 +127,22 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: vectortab2k_orc
-            Statistics: Num rows: 2001 Data size: 273608 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 5057 Data size: 60690 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: t (type: tinyint), si (type: smallint), i (type: int), (t < 0) (type: boolean), (si <= 0) (type: boolean), (i = 0) (type: boolean)
              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 2001 Data size: 273608 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 5057 Data size: 60690 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int)
                sort order: +++
-                Statistics: Num rows: 2001 Data size: 273608 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 5057 Data size: 60690 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col3 (type: boolean), _col4 (type: boolean), _col5 (type: boolean)
      Execution mode: vectorized
      Reduce Operator Tree:
        Select Operator
          expressions: hash(KEY.reducesinkkey0,KEY.reducesinkkey1,KEY.reducesinkkey2,VALUE._col0,VALUE._col1,VALUE._col2) (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 2001 Data size: 273608 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 5057 Data size: 60690 Basic stats: COMPLETE Column stats: NONE
          Group By Operator
            aggregations: sum(_col0)
            mode: complete
@@ -197,22 +197,22 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: vectortab2k_orc
-            Statistics: Num rows: 2001 Data size: 273608 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 3034 Data size: 60690 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: t (type: tinyint), si (type: smallint), i (type: int), b (type: bigint), (t > 0) (type: boolean), (si >= 0) (type: boolean), (i <> 0) (type: boolean), (b > 0) (type: boolean)
              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, _col7
-              Statistics: Num rows: 2001 Data size: 273608 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 3034 Data size: 60690 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: int), _col3 (type: bigint)
                sort order: ++++
-                Statistics: Num rows: 2001 Data size: 273608 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 3034 Data size: 60690 Basic stats: COMPLETE Column stats: NONE
                value expressions: _col4 (type: boolean), _col5 (type: boolean), _col6 (type: boolean), _col7 (type: boolean)
      Execution mode: vectorized
      Reduce Operator Tree:
        Select Operator
          expressions: hash(KEY.reducesinkkey0,KEY.reducesinkkey1,KEY.reducesinkkey2,KEY.reducesinkkey3,VALUE._col0,VALUE._col1,VALUE._col2,VALUE._col3) (type: int)
          outputColumnNames: _col0
-          Statistics: Num rows: 2001 Data size: 273608 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 3034 Data size: 60690 Basic stats: COMPLETE Column stats: NONE
          Group By Operator
            aggregations: sum(_col0)
            mode: complete
diff --git a/ql/src/test/results/clientpositive/vector_outer_join6.q.out b/ql/src/test/results/clientpositive/vector_outer_join6.q.out
index 8c09716..2cb4ede 100644
--- a/ql/src/test/results/clientpositive/vector_outer_join6.q.out
+++ b/ql/src/test/results/clientpositive/vector_outer_join6.q.out
@@ -153,11 +153,11 @@ STAGE PLANS:
        $hdt$_0:$hdt$_1:tjoin2
          TableScan
            alias: tjoin2
-            Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: rnum (type: int), c1 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 _col1 (type: int)
@@ -165,11 +165,11 @@
        $hdt$_1:tjoin3
          TableScan
            alias: tjoin3
-            Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 42 Data size: 342 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: rnum (type: int), c1 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 42 Data size: 342 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 _col2 (type: int)
@@ -180,11 +180,11 @@
      Map Operator Tree:
          TableScan
            alias: tjoin1
-            Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 42 Data size: 339 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: rnum (type: int), c1 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 42 Data size: 339 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Outer Join0 to 1
@@ -192,11 +192,11 @@
                  0 _col1 (type: int)
                  1 _col1 (type: int)
                outputColumnNames: _col0, _col2, _col3
-                Statistics: Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 50 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: int), _col2 (type: int), _col3 (type: int)
                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 50 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  Map Join Operator
                    condition map:
                         Left Outer Join0 to 1
@@ -204,14 +204,14 @@
                      0 _col2 (type: int)
                      1 _col1 (type: int)
                    outputColumnNames: _col0, _col1, _col3
-                    Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 55 Data size: 447 Basic stats: COMPLETE Column stats: NONE
                    Select Operator
                      expressions: _col0 (type: int), _col1 (type: int), _col3 (type: int)
                      outputColumnNames: _col0, _col1, _col2
-                      Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 55 Data size: 447 Basic stats: COMPLETE Column stats: NONE
                      File Output Operator
                        compressed: false
-                        Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 55 Data size: 447 Basic stats: COMPLETE Column stats: NONE
                        table:
                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -271,11 +271,11 @@ STAGE PLANS:
        $hdt$_0:$hdt$_1:tjoin2
          TableScan
            alias: tjoin2
-            Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: rnum (type: int), c1 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 4 Data size: 372 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 46 Data size: 370 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 _col1 (type: int)
@@ -283,11 +283,11 @@
        $hdt$_1:tjoin3
          TableScan
            alias: tjoin3
-            Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 85 Data size: 342 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: c1 (type: int)
              outputColumnNames: _col0
-              Statistics: Num rows: 2 Data size: 188 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 85 Data size: 342 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 _col2 (type: int)
@@ -298,11 +298,11 @@
      Map Operator Tree:
          TableScan
            alias: tjoin1
-            Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 42 Data size: 339 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: rnum (type: int), c1 (type: int)
              outputColumnNames: _col0, _col1
-              Statistics: Num rows: 3 Data size: 32 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 42 Data size: 339 Basic stats: COMPLETE Column stats: NONE
              Map Join Operator
                condition map:
                     Left Outer Join0 to 1
@@ -310,11 +310,11 @@
                  0 _col1 (type: int)
                  1 _col1 (type: int)
                outputColumnNames: _col0, _col2, _col3
-                Statistics: Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 50 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                Select Operator
                  expressions: _col0 (type: int), _col2 (type: int), _col3 (type: int)
                  outputColumnNames: _col0, _col1, _col2
-                  Statistics: Num rows: 4 Data size: 409 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 50 Data size: 407 Basic stats: COMPLETE Column stats: NONE
                  Map Join Operator
                    condition map:
                         Left Outer Join0 to 1
@@ -322,10 +322,10 @@
                      0 _col2 (type: int)
                      1 _col0 (type: int)
                    outputColumnNames: _col0, _col1
-                    Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 93 Data size: 376 Basic stats: COMPLETE Column stats: NONE
                    File Output Operator
                      compressed: false
-                      Statistics: Num rows: 4 Data size: 449 Basic stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 93 Data size: 376 Basic stats: COMPLETE Column stats: NONE
                      table:
                          input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                          output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vector_partitioned_date_time.q.out b/ql/src/test/results/clientpositive/vector_partitioned_date_time.q.out
index 09dd873..268b259 100644
--- a/ql/src/test/results/clientpositive/vector_partitioned_date_time.q.out
+++ b/ql/src/test/results/clientpositive/vector_partitioned_date_time.q.out
@@ -273,15 +273,15 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: flights_tiny_orc
-            Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: origin_city_name (type: string), dest_city_name (type: string), fl_date (type: date), fl_time (type: timestamp), arr_delay (type: float), fl_num (type: int)
              outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-              Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
              Reduce Output Operator
                key expressions: _col5 (type: int), _col2 (type: date)
                sort order: ++
-                Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
                TopN Hash Memory Usage: 0.1
                value expressions: _col0 (type: string), _col1 (type: string), _col3 (type: timestamp), _col4 (type: float)
      Execution mode: vectorized
@@ -289,10 +289,10 @@
        Select Operator
          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), KEY.reducesinkkey1 (type: date), VALUE._col2 (type: timestamp), VALUE._col3 (type: float), KEY.reducesinkkey0 (type: int)
          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
          Limit
            Number of rows: 25
-            Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
            File Output Operator
              compressed: false
              table:
@@ -307,20 +307,20 @@
            Reduce Output Operator
              key expressions: _col5 (type: int), _col2 (type: date)
              sort order: ++
-              Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
              TopN Hash Memory Usage: 0.1
              value expressions: _col0 (type: string), _col1 (type: string), _col3 (type: timestamp), _col4 (type: float)
      Reduce Operator Tree:
        Select Operator
          expressions: VALUE._col0 (type: string), VALUE._col1 (type: string), KEY.reducesinkkey1 (type: date), VALUE._col2 (type: timestamp), VALUE._col3 (type: float), KEY.reducesinkkey0 (type: int)
          outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
-          Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
          Limit
            Number of rows: 25
-            Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
            File Output Operator
              compressed: false
-              Statistics: Num rows: 25 Data size: 7200 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 4 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
              table:
                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -381,22 +381,22 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: flights_tiny_orc
-            Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 26 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: fl_date (type: date)
              outputColumnNames: fl_date
-              Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 26 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count()
                keys: fl_date (type: date)
                mode: hash
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 26 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: date)
                  sort order: +
                  Map-reduce partition columns: _col0 (type: date)
-                  Statistics: Num rows: 137 Data size: 39456 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 26 Data size: 1457 Basic stats: COMPLETE Column stats: NONE
                  value expressions: _col1 (type: bigint)
      Execution mode: vectorized
      Reduce Operator Tree:
@@ -405,10 +405,10 @@
          keys: KEY._col0 (type: date)
          mode: mergepartial
          outputColumnNames: _col0, _col1
-          Statistics: Num rows: 68 Data size: 19584 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 13 Data size: 728 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 68 Data size: 19584 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 13 Data size: 728 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out b/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out
index 19a3d50..ae99f62 100644
--- a/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out
+++ b/ql/src/test/results/clientpositive/vector_reduce_groupby_decimal.q.out
@@ -39,21 +39,21 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: decimal_test
-            Statistics: Num rows: 6102 Data size: 1440072 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 477 Data size: 112623 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (cdecimal1 is not null and cdecimal2 is not null) (type: boolean)
-              Statistics: Num rows: 6102 Data size: 1440072 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 477 Data size: 112623 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: min(cdecimal1)
                keys: cint (type: int), cdouble (type: double), cdecimal1 (type: decimal(20,10)), cdecimal2 (type: decimal(23,14))
                mode: hash
                outputColumnNames: _col0, _col1, _col2, _col3, _col4
-                Statistics: Num rows: 6102 Data size: 1440072 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 477 Data size: 112623 Basic stats: COMPLETE Column stats: NONE
                Reduce Output Operator
                  key expressions: _col0 (type: int), _col1 (type: double), _col2 (type: decimal(20,10)), _col3 (type: decimal(23,14))
                  sort order: ++++
                  Map-reduce partition columns: _col0 (type: int), _col1 (type: double), _col2 (type: decimal(20,10)), _col3 (type: decimal(23,14))
-                  Statistics: Num rows: 6102 Data size: 1440072 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 477 Data size: 112623 Basic stats: COMPLETE Column stats: NONE
                  value expressions: _col4 (type: decimal(20,10))
      Execution mode: vectorized
      Reduce Operator Tree:
@@ -62,7 +62,7 @@
          keys: KEY._col0 (type: int), KEY._col1 (type: double), KEY._col2 (type: decimal(20,10)), KEY._col3 (type: decimal(23,14))
          mode: mergepartial
          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 3051 Data size: 720036 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 238 Data size: 56193 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
            table:
@@ -77,14 +77,14 @@
            Reduce Output Operator
              key expressions: _col0 (type: int), _col1 (type: double), _col2 (type: decimal(20,10)), _col3 (type: decimal(23,14))
              sort order: ++++
-              Statistics: Num rows: 3051 Data size: 720036 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 238 Data size: 56193 Basic stats: COMPLETE Column stats: NONE
              TopN Hash Memory Usage: 0.1
              value expressions: _col4 (type: decimal(20,10))
      Reduce Operator Tree:
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int), KEY.reducesinkkey1 (type: double), KEY.reducesinkkey2 (type: decimal(20,10)), KEY.reducesinkkey3 (type: decimal(23,14)), VALUE._col0 (type: decimal(20,10))
          outputColumnNames: _col0, _col1, _col2, _col3, _col4
-          Statistics: Num rows: 3051 Data size: 720036 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 238 Data size: 56193 Basic stats: COMPLETE Column stats: NONE
          Limit
            Number of rows: 50
            Statistics: Num rows: 50 Data size: 11800 Basic stats: COMPLETE Column stats: NONE
diff --git a/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out b/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
index 5ca6d70..ec7596f 100644
--- a/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
+++ b/ql/src/test/results/clientpositive/vector_varchar_mapjoin1.q.out
@@ -146,14 +146,14 @@ STAGE PLANS:
        $hdt$_0:a
          TableScan
            alias: a
-            Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: c2 is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: c1 (type: int), c2 (type: varchar(10))
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 _col1 (type: varchar(10))
@@ -164,14 +164,14 @@
      Map Operator Tree:
          TableScan
            alias: a
-            Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: c2 is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: c1 (type: int), c2 (type: varchar(10))
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                Map Join Operator
                  condition map:
                       Inner Join 0 to 1
@@ -179,11 +179,11 @@
                    0 _col1 (type: varchar(10))
                    1 _col1 (type: varchar(10))
                  outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: int)
                    sort order: +
-                    Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                    value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: varchar(10))
      Execution mode: vectorized
      Local Work:
@@ -192,10 +192,10 @@
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(10)), VALUE._col1 (type: int), VALUE._col2 (type: varchar(10))
          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -240,14 +240,14 @@ STAGE PLANS:
        $hdt$_0:a
          TableScan
            alias: a
-            Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: c2 is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: c1 (type: int), c2 (type: varchar(10))
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 _col1 (type: varchar(20))
@@ -258,14 +258,14 @@
      Map Operator Tree:
          TableScan
            alias: b
-            Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 12 Data size: 304 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: c2 is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 12 Data size: 304 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: c1 (type: int), c2 (type: varchar(20))
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 12 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                Map Join Operator
                  condition map:
                       Inner Join 0 to 1
@@ -273,11 +273,11 @@
                    0 _col1 (type: varchar(20))
                    1 _col1 (type: varchar(20))
                  outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: int)
                    sort order: +
-                    Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                    value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: varchar(20))
      Execution mode: vectorized
      Local Work:
@@ -286,10 +286,10 @@
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(10)), VALUE._col1 (type: int), VALUE._col2 (type: varchar(20))
          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -336,14 +336,14 @@ STAGE PLANS:
        $hdt$_1:b
          TableScan
            alias: b
-            Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: c2 is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: c1 (type: int), c2 (type: string)
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 2 Data size: 300 Basic stats: COMPLETE Column stats: NONE
              HashTable Sink Operator
                keys:
                  0 UDFToString(_col1) (type: string)
@@ -354,14 +354,14 @@
      Map Operator Tree:
          TableScan
            alias: a
-            Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: c2 is not null (type: boolean)
-              Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: c1 (type: int), c2 (type: varchar(10))
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 3 Data size: 273 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 21 Data size: 304 Basic stats: COMPLETE Column stats: NONE
                Map Join Operator
                  condition map:
                       Inner Join 0 to 1
@@ -369,11 +369,11 @@
                    0 UDFToString(_col1) (type: string)
                    1 _col1 (type: string)
                  outputColumnNames: _col0, _col1, _col2, _col3
-                  Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                  Reduce Output Operator
                    key expressions: _col0 (type: int)
                    sort order: +
-                    Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
                    value expressions: _col1 (type: varchar(10)), _col2 (type: int), _col3 (type: string)
      Execution mode: vectorized
      Local Work:
@@ -382,10 +382,10 @@
        Select Operator
          expressions: KEY.reducesinkkey0 (type: int), VALUE._col0 (type: varchar(10)), VALUE._col1 (type: int), VALUE._col2 (type: string)
          outputColumnNames: _col0, _col1, _col2, _col3
-          Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+          Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
          File Output Operator
            compressed: false
-            Statistics: Num rows: 3 Data size: 300 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 23 Data size: 334 Basic stats: COMPLETE Column stats: NONE
            table:
                input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out b/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
index 6cae52c..14f16e7 100644
--- a/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_decimal_date.q.out
@@ -26,20 +26,20 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: date_decimal_test
-            Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 483 Data size: 86990 Basic stats: COMPLETE Column stats: NONE
            Filter Operator
              predicate: (cint is not null and cdouble is not null) (type: boolean)
-              Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 483 Data size: 86990 Basic stats: COMPLETE Column stats: NONE
              Select Operator
                expressions: cdate (type: date), cdecimal (type: decimal(20,10))
                outputColumnNames: _col0, _col1
-                Statistics: Num rows: 12288 Data size: 1651260 Basic stats: COMPLETE Column stats: NONE
+                Statistics: Num rows: 483 Data size: 86990 Basic stats: COMPLETE Column stats: NONE
                Limit
                  Number of rows: 10
-                  Statistics: Num rows: 10 Data size: 1340 Basic stats: COMPLETE Column stats: NONE
+                  Statistics: Num rows: 10 Data size: 1800 Basic stats: COMPLETE Column stats: NONE
                  File Output Operator
                    compressed: false
-                    Statistics: Num rows: 10 Data size: 1340 Basic stats: COMPLETE Column stats: NONE
+                    Statistics: Num rows: 10 Data size: 1800 Basic stats: COMPLETE Column stats: NONE
                    table:
                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
diff --git a/ql/src/test/results/clientpositive/vectorization_short_regress.q.out b/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
index 7691dda..933498c 100644
--- a/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_short_regress.q.out
@@ -3000,9 +3000,9 @@ STAGE PLANS:
      Map Operator Tree:
          TableScan
            alias: alltypesnullorc
-            Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE
+            Statistics: Num rows: 1 Data size: 951 Basic stats: COMPLETE Column stats: COMPLETE
            Select Operator
-              Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: COMPLETE
+              Statistics: Num rows: 1 Data size: 951 Basic stats: COMPLETE Column stats: COMPLETE
              Group By Operator
                aggregations: count()
                mode: hash
@@ -3058,11 +3058,11 @@
      Map Operator Tree:
          TableScan
            alias: alltypesnullorc
-            Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: ctinyint (type: tinyint)
              outputColumnNames: ctinyint
-              Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(ctinyint)
                mode: hash
@@ -3118,11 +3118,11 @@
      Map Operator Tree:
          TableScan
            alias: alltypesnullorc
-            Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: cint (type: int)
              outputColumnNames: cint
-              Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(cint)
                mode: hash
@@ -3178,11 +3178,11 @@
      Map Operator Tree:
          TableScan
            alias: alltypesnullorc
-            Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: cfloat (type: float)
              outputColumnNames: cfloat
-              Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(cfloat)
                mode: hash
@@ -3238,11 +3238,11 @@
      Map Operator Tree:
          TableScan
            alias: alltypesnullorc
-            Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: cstring1 (type: string)
              outputColumnNames: cstring1
-              Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 9 Data size: 951 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(cstring1)
                mode: hash
@@ -3298,11 +3298,11 @@
      Map Operator Tree:
          TableScan
            alias: alltypesnullorc
-            Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+            Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
            Select Operator
              expressions: cboolean1 (type: boolean)
              outputColumnNames: cboolean1
-              Statistics: Num rows: 12288 Data size: 168 Basic stats: COMPLETE Column stats: NONE
+              Statistics: Num rows: 237 Data size: 951 Basic stats: COMPLETE Column stats: NONE
              Group By Operator
                aggregations: count(cboolean1)
                mode: hash
diff --git a/ql/src/test/results/clientpositive/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/vectorized_ptf.q.out
index 3b17591..4a628b0 100644
--- a/ql/src/test/results/clientpositive/vectorized_ptf.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_ptf.q.out
@@ -6796,15 +6796,20 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count -1
            columns p_mfgr,p_name,p_size,r,dr,s
            columns.comments
            columns.types string:string:int:int:int:double
#### A masked pattern was here ####
            name default.part_4
+            numFiles 0
+            numRows 0
+            rawDataSize 0
            serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.part_4
@@ -6821,15 +6826,20 @@
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count -1
            columns p_mfgr,p_name,p_size,r,dr,s
            columns.comments
            columns.types string:string:int:int:int:double
#### A masked pattern was here ####
            name default.part_4
+            numFiles 0
+            numRows 0
+            rawDataSize 0
            serialization.ddl struct part_4 { string p_mfgr, string p_name, i32 p_size, i32 r, i32 dr, double s}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.part_4
@@ -7026,15 +7036,20 @@ STAGE PLANS:
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count -1
            columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
            columns.comments
            columns.types string:string:int:int:int:int:double:int
#### A masked pattern was here ####
            name default.part_5
+            numFiles 0
+            numRows 0
+            rawDataSize 0
            serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.part_5
@@ -7051,15 +7066,20 @@
          input format: org.apache.hadoop.mapred.TextInputFormat
          output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
          properties:
+            COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"}
            bucket_count -1
            columns p_mfgr,p_name,p_size,s2,r,dr,cud,fv1
            columns.comments
            columns.types string:string:int:int:int:int:double:int
#### A masked pattern was here ####
            name default.part_5
+            numFiles 0
+            numRows 0
+            rawDataSize 0
            serialization.ddl struct part_5 { string p_mfgr, string p_name, i32 p_size, i32 s2, i32 r, i32 dr, double cud, i32 fv1}
            serialization.format 1
            serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+            totalSize 0
#### A masked pattern was here ####
          serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
          name: default.part_5