diff --git a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java index 25c7508..c78f005 100644 --- a/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java +++ b/common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java @@ -232,6 +232,30 @@ public static void clearColumnStatsState(Map params) { } } + public static void removeColumnStatsState(Map params, List colNames) { + String statsAcc; + if (params != null && (statsAcc = params.get(COLUMN_STATS_ACCURATE)) != null) { + // statsAcc may not be in JSON format, which would throw an exception + JSONObject stats = parseStatsAcc(statsAcc); + try { + JSONObject colStats = stats.getJSONObject(COLUMN_STATS); + for (String colName : colNames) { + if (colStats.has(colName)) { + colStats.remove(colName); + } + } + if (colStats.length() != 0) { + stats.put(COLUMN_STATS, colStats); + } else { + stats.remove(COLUMN_STATS); + } + params.put(COLUMN_STATS_ACCURATE, stats.toString()); + } catch (JSONException e) { + LOG.debug(e.getMessage()); + } + } + } + public static void setBasicStatsStateForCreateTable(Map params, String setting) { if (TRUE.equals(setting)) { for (String stat : StatsSetupConst.supportedStats) { diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java index 64d9fc1..bae39ac 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java @@ -729,6 +729,7 @@ private void updatePartColumnStatsForAlterColumns(RawStore msdb, Partition oldPa assert (partsColStats.size() <= 1); for (ColumnStatistics partColStats : partsColStats) { //actually only at most one loop List statsObjs = partColStats.getStatsObj(); + List deletedCols = new ArrayList(); for (ColumnStatisticsObj statsObj : statsObjs) { boolean found =false;
for (FieldSchema newCol : newCols) { @@ -741,8 +742,10 @@ private void updatePartColumnStatsForAlterColumns(RawStore msdb, Partition oldPa if (!found) { msdb.deletePartitionColumnStatistics(dbName, tableName, oldPartName, partVals, statsObj.getColName()); + deletedCols.add(statsObj.getColName()); } } + StatsSetupConst.removeColumnStatsState(newPart.getParameters(), deletedCols); } } catch (NoSuchObjectException nsoe) { LOG.debug("Could not find db entry." + nsoe); @@ -827,6 +830,7 @@ void alterTableUpdateTableColumnStats(RawStore msdb, } else { List statsObjs = colStats.getStatsObj(); if (statsObjs != null) { + List deletedCols = new ArrayList(); for (ColumnStatisticsObj statsObj : statsObjs) { boolean found = false; for (FieldSchema newCol : newCols) { @@ -841,11 +845,14 @@ void alterTableUpdateTableColumnStats(RawStore msdb, if (!newDbName.equals(dbName) || !newTableName.equals(tableName)) { msdb.deleteTableColumnStatistics(dbName, tableName, statsObj.getColName()); newStatsObjs.add(statsObj); + deletedCols.add(statsObj.getColName()); } } else { msdb.deleteTableColumnStatistics(dbName, tableName, statsObj.getColName()); + deletedCols.add(statsObj.getColName()); } } + StatsSetupConst.removeColumnStatsState(newTable.getParameters(), deletedCols); } } } diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index 4aea152..850e112 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -292,11 +292,14 @@ public static boolean requireCalStats(Configuration hiveConf, Partition oldPart, return true; } - if (environmentContext != null - && environmentContext.isSetProperties() - && StatsSetupConst.TASK.equals(environmentContext.getProperties().get( - StatsSetupConst.STATS_GENERATED))) { - return true; + if (environmentContext != null && 
environmentContext.isSetProperties()) { + String statsType = environmentContext.getProperties().get(StatsSetupConst.STATS_GENERATED); + // no matter STATS_GENERATED is USER or TASK, all need to re-calculate the stats: + // USER: alter table .. update statistics + // TASK: from some sql operation which could collect and compute stats + if (StatsSetupConst.TASK.equals(statsType) || StatsSetupConst.USER.equals(statsType)) { + return true; + } } // requires to calculate stats if new and old have different fast stats diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index a1fb874..2e12897 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -1171,10 +1171,12 @@ private int touch(Hive db, AlterTableSimpleDesc touchDesc) throws HiveException { Table tbl = db.getTable(touchDesc.getTableName()); + EnvironmentContext environmentContext = new EnvironmentContext(); + environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); if (touchDesc.getPartSpec() == null) { try { - db.alterTable(touchDesc.getTableName(), tbl, null); + db.alterTable(touchDesc.getTableName(), tbl, environmentContext); } catch (InvalidOperationException e) { throw new HiveException("Uable to update table"); } @@ -1186,7 +1188,7 @@ private int touch(Hive db, AlterTableSimpleDesc touchDesc) throw new HiveException("Specified partition does not exist"); } try { - db.alterPartition(touchDesc.getTableName(), part, null); + db.alterPartition(touchDesc.getTableName(), part, environmentContext); } catch (InvalidOperationException e) { throw new HiveException(e); } @@ -3493,6 +3495,16 @@ private boolean isSchemaEvolutionEnabled(Table tbl) { private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Partition part) throws HiveException { + EnvironmentContext environmentContext = 
alterTbl.getEnvironmentContext(); + if (environmentContext == null) { + environmentContext = new EnvironmentContext(); + alterTbl.setEnvironmentContext(environmentContext); + } + // do not need update stats in alter table/partition operations + if (environmentContext.getProperties() == null || + environmentContext.getProperties().get(StatsSetupConst.DO_NOT_UPDATE_STATS) == null) { + environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); + } if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) { tbl.setDbName(Utilities.getDatabaseName(alterTbl.getNewName())); @@ -3630,6 +3642,10 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part } sd.setCols(alterTbl.getNewCols()); } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) { + if (StatsSetupConst.USER.equals(environmentContext.getProperties() + .get(StatsSetupConst.STATS_GENERATED))) { + environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS); + } if (part != null) { part.getTPartition().getParameters().putAll(alterTbl.getProps()); } else { @@ -3637,6 +3653,11 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part } } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.DROPPROPS) { Iterator keyItr = alterTbl.getProps().keySet().iterator(); + if (StatsSetupConst.USER.equals(environmentContext.getProperties() + .get(StatsSetupConst.STATS_GENERATED))) { + // drop a stats parameter, which triggers recompute stats update automatically + environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS); + } while (keyItr.hasNext()) { if (part != null) { part.getTPartition().getParameters().remove(keyItr.next()); @@ -3730,6 +3751,8 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part } catch (URISyntaxException e) { throw new HiveException(e); } + environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS); 
+ } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSKEWEDBY) { // Validation's been done at compile time. no validation is needed here. List skewedColNames = null; @@ -3775,6 +3798,8 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part throw new HiveException(e); } } + + environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS); } else if (alterTbl.getOp() == AlterTableTypes.ALTERBUCKETNUM) { if (part != null) { if (part.getBucketCount() == alterTbl.getNumberBuckets()) { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 0f472e7..ba54d4e 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -1361,30 +1361,34 @@ private void analyzeAlterTableProps(String[] qualified, HashMap HashMap mapProp = getProps((ASTNode) (ast.getChild(0)) .getChild(0)); EnvironmentContext environmentContext = null; - if (queryState.getCommandType() - .equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName()) - || queryState.getCommandType() - .equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) { - // we need to check if the properties are valid, especially for stats. - boolean changeStatsSucceeded = false; - for (Entry entry : mapProp.entrySet()) { - // we make sure that we do not change anything if there is anything - // wrong. - if (entry.getKey().equals(StatsSetupConst.ROW_COUNT) - || entry.getKey().equals(StatsSetupConst.RAW_DATA_SIZE)) { - try { - Long.parseLong(entry.getValue()); - changeStatsSucceeded = true; - } catch (Exception e) { - throw new SemanticException("AlterTable " + entry.getKey() + " failed with value " - + entry.getValue()); - } - } else { + // we need to check if the properties are valid, especially for stats. + // they might be changed via alter table .. 
update statistics or + // alter table .. set tblproperties. If the property is not row_count + // or raw_data_size, it could not be changed through update statistics + boolean changeStatsSucceeded = false; + for (Entry entry : mapProp.entrySet()) { + // we make sure that we do not change anything if there is anything + // wrong. + if (entry.getKey().equals(StatsSetupConst.ROW_COUNT) + || entry.getKey().equals(StatsSetupConst.RAW_DATA_SIZE)) { + try { + Long.parseLong(entry.getValue()); + changeStatsSucceeded = true; + } catch (Exception e) { + throw new SemanticException("AlterTable " + entry.getKey() + " failed with value " + + entry.getValue()); + } + } else { + if (queryState.getCommandType() + .equals(HiveOperation.ALTERTABLE_UPDATETABLESTATS.getOperationName()) + || queryState.getCommandType() + .equals(HiveOperation.ALTERTABLE_UPDATEPARTSTATS.getOperationName())) { throw new SemanticException("AlterTable UpdateStats " + entry.getKey() - + " failed because the only valid keys are" + StatsSetupConst.ROW_COUNT + " and " + + " failed because the only valid keys are " + StatsSetupConst.ROW_COUNT + " and " + StatsSetupConst.RAW_DATA_SIZE); } } + if (changeStatsSucceeded) { environmentContext = new EnvironmentContext(); environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.USER); diff --git a/ql/src/test/queries/clientpositive/alter_table_stats_status.q b/ql/src/test/queries/clientpositive/alter_table_stats_status.q new file mode 100644 index 0000000..8e07b81 --- /dev/null +++ b/ql/src/test/queries/clientpositive/alter_table_stats_status.q @@ -0,0 +1,48 @@ +create database statsdb; +use statsdb; +create table srctable like default.src; +load data local inpath '../../data/files/kv1.txt' overwrite into table srctable; + +analyze table srctable compute statistics; +describe formatted srctable; + +alter table srctable touch; +alter table srctable rename to statstable; + +alter table statstable add columns (newcol string); +alter table 
statstable change key key string; +alter table statstable set tblproperties('testtblstats'='unchange'); +describe formatted statstable; + +alter table statstable update statistics set ('numRows' = '1000'); +describe formatted statstable; + +analyze table statstable compute statistics; +describe formatted statstable; +alter table statstable set location '${system:test.tmp.dir}/newdir'; +describe formatted statstable; + +drop table statstable; + +create table srcpart like default.srcpart; +load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11'); +load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12'); + +analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics; +describe formatted srcpart partition (ds='2008-04-08', hr='11'); + +alter table srcpart touch; +alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11'); +alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string); +alter table srcpart partition (ds='2017-01-19', hr='11') change key key string; +alter table srcpart set tblproperties('testpartstats'='unchange'); +describe formatted srcpart partition (ds='2017-01-19', hr='11'); + +alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000'); +describe formatted srcpart partition (ds='2017-01-19', hr='11'); + +analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics; +describe formatted srcpart partition (ds='2017-01-19', hr='11'); + +drop table srcpart; + diff --git a/ql/src/test/results/clientnegative/unset_table_property.q.out b/ql/src/test/results/clientnegative/unset_table_property.q.out index 0705b92..4aedfc5 100644 --- a/ql/src/test/results/clientnegative/unset_table_property.q.out +++ b/ql/src/test/results/clientnegative/unset_table_property.q.out @@ -18,6 +18,7 @@ PREHOOK: query: 
SHOW TBLPROPERTIES testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} a 1 c 3 #### A masked pattern was here #### diff --git a/ql/src/test/results/clientpositive/alter_file_format.q.out b/ql/src/test/results/clientpositive/alter_file_format.q.out index 14dd892..cdb59d4 100644 --- a/ql/src/test/results/clientpositive/alter_file_format.q.out +++ b/ql/src/test/results/clientpositive/alter_file_format.q.out @@ -67,6 +67,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -110,6 +111,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -153,6 +155,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -196,6 +199,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -239,6 +243,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -282,6 +287,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 diff --git a/ql/src/test/results/clientpositive/alter_skewed_table.q.out b/ql/src/test/results/clientpositive/alter_skewed_table.q.out index 
0f60ba3..fefef4c 100644 --- a/ql/src/test/results/clientpositive/alter_skewed_table.q.out +++ b/ql/src/test/results/clientpositive/alter_skewed_table.q.out @@ -67,6 +67,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -169,6 +170,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -267,6 +269,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 diff --git a/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out b/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out index 566b804..5afb7fa 100644 --- a/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out +++ b/ql/src/test/results/clientpositive/alter_table_not_sorted.q.out @@ -68,6 +68,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} SORTBUCKETCOLSPREFIX TRUE #### A masked pattern was here #### numFiles 0 diff --git a/ql/src/test/results/clientpositive/alter_table_stats_status.q.out b/ql/src/test/results/clientpositive/alter_table_stats_status.q.out new file mode 100644 index 0000000..3404f88 --- /dev/null +++ b/ql/src/test/results/clientpositive/alter_table_stats_status.q.out @@ -0,0 +1,572 @@ +PREHOOK: query: create database statsdb +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:statsdb +POSTHOOK: query: create database statsdb +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:statsdb +PREHOOK: query: use statsdb +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:statsdb +POSTHOOK: query: use 
statsdb +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:statsdb +PREHOOK: query: create table srctable like default.src +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:statsdb +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: create table srctable like default.src +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:statsdb +POSTHOOK: Output: statsdb@srctable +PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srctable +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srctable +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: statsdb@srctable +PREHOOK: query: analyze table srctable compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: statsdb@srctable +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: analyze table srctable compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: statsdb@srctable +POSTHOOK: Output: statsdb@srctable +PREHOOK: query: describe formatted srctable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srctable +POSTHOOK: query: describe formatted srctable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srctable +# col_name data_type comment + +key string default +value string default + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage 
Desc Params: + serialization.format 1 +PREHOOK: query: alter table srctable touch +PREHOOK: type: ALTERTABLE_TOUCH +PREHOOK: Input: statsdb@srctable +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: alter table srctable touch +POSTHOOK: type: ALTERTABLE_TOUCH +POSTHOOK: Input: statsdb@srctable +POSTHOOK: Output: statsdb@srctable +PREHOOK: query: alter table srctable rename to statstable +PREHOOK: type: ALTERTABLE_RENAME +PREHOOK: Input: statsdb@srctable +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: alter table srctable rename to statstable +POSTHOOK: type: ALTERTABLE_RENAME +POSTHOOK: Input: statsdb@srctable +POSTHOOK: Output: statsdb@srctable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: alter table statstable add columns (newcol string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: alter table statstable add columns (newcol string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: alter table statstable change key key string +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: alter table statstable change key key string +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: alter table statstable set tblproperties('testtblstats'='unchange') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: alter table statstable set tblproperties('testtblstats'='unchange') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: describe formatted statstable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@statstable +POSTHOOK: query: describe formatted statstable +POSTHOOK: type: DESCTABLE +POSTHOOK: 
Input: statsdb@statstable +# col_name data_type comment + +key string default +value string default +newcol string + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 1 + numRows 500 + rawDataSize 5312 + testtblstats unchange + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: alter table statstable update statistics set ('numRows' = '1000') +PREHOOK: type: ALTERTABLE_UPDATETABLESTATS +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: alter table statstable update statistics set ('numRows' = '1000') +POSTHOOK: type: ALTERTABLE_UPDATETABLESTATS +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: describe formatted statstable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@statstable +POSTHOOK: query: describe formatted statstable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@statstable +# col_name data_type comment + +key string default +value string default +newcol string + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: +#### A masked pattern was here #### + numFiles 1 + numRows 1000 + rawDataSize 5312 + testtblstats unchange + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: analyze table statstable compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: analyze table statstable compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: describe formatted statstable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@statstable +POSTHOOK: query: describe formatted statstable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@statstable +# col_name data_type comment + +key string default +value string default +newcol string + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 1 + numRows 500 + rawDataSize 5312 + testtblstats unchange + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +#### A masked pattern was here #### +PREHOOK: type: ALTERTABLE_LOCATION +PREHOOK: Input: statsdb@statstable +#### A masked pattern was here #### +PREHOOK: Output: statsdb@statstable +#### A masked pattern was here #### +POSTHOOK: type: ALTERTABLE_LOCATION +POSTHOOK: Input: statsdb@statstable +#### A masked pattern was here #### 
+POSTHOOK: Output: statsdb@statstable +PREHOOK: query: describe formatted statstable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@statstable +POSTHOOK: query: describe formatted statstable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@statstable +# col_name data_type comment + +key string default +value string default +newcol string + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: +#### A masked pattern was here #### + numFiles 0 + numRows 500 + rawDataSize 5312 + testtblstats unchange + totalSize 0 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: drop table statstable +PREHOOK: type: DROPTABLE +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: drop table statstable +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: create table srcpart like default.srcpart +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:statsdb +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: create table srcpart like default.srcpart +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:statsdb +POSTHOOK: Output: statsdb@srcpart +PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11') +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11') +POSTHOOK: 
type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12') +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12') +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=12 +PREHOOK: query: analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: statsdb@srcpart +PREHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Output: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: query: analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +PREHOOK: query: describe formatted srcpart partition (ds='2008-04-08', hr='11') +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srcpart +POSTHOOK: query: describe formatted srcpart partition (ds='2008-04-08', hr='11') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srcpart +# col_name data_type comment + +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: statsdb +Table: srcpart +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# 
Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: alter table srcpart touch +PREHOOK: type: ALTERTABLE_TOUCH +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: alter table srcpart touch +POSTHOOK: type: ALTERTABLE_TOUCH +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart +PREHOOK: query: alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11') +PREHOOK: type: ALTERTABLE_RENAMEPART +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: query: alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11') +POSTHOOK: type: ALTERTABLE_RENAMEPART +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') change key key string +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: query: alter table srcpart 
partition (ds='2017-01-19', hr='11') change key key string +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: alter table srcpart set tblproperties('testpartstats'='unchange') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: alter table srcpart set tblproperties('testpartstats'='unchange') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart +PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srcpart +POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srcpart +# col_name data_type comment + +key string default +value string default +newcol string + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2017-01-19, 11] +Database: statsdb +Table: srcpart +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000') +PREHOOK: type: ALTERTABLE_UPDATEPARTSTATS +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: 
statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000') +POSTHOOK: type: ALTERTABLE_UPDATEPARTSTATS +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srcpart +POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srcpart +# col_name data_type comment + +key string default +value string default +newcol string + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2017-01-19, 11] +Database: statsdb +Table: srcpart +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + numFiles 1 + numRows 1000 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: statsdb@srcpart +PREHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: Output: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: query: analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: Output: statsdb@srcpart +POSTHOOK: Output: 
statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srcpart +POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srcpart +# col_name data_type comment + +key string default +value string default +newcol string + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2017-01-19, 11] +Database: statsdb +Table: srcpart +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: drop table srcpart +PREHOOK: type: DROPTABLE +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: drop table srcpart +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart diff --git a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out index e4dc75e..f03a85d 100644 --- a/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out +++ b/ql/src/test/results/clientpositive/columnStatsUpdateForStatsOptimizer_2.q.out @@ -172,6 +172,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern 
was here #### numFiles 2 numRows 3 @@ -255,44 +256,12 @@ PREHOOK: type: QUERY POSTHOOK: query: explain select count(1) from calendar POSTHOOK: type: QUERY STAGE DEPENDENCIES: - Stage-1 is a root stage - Stage-0 depends on stages: Stage-1 + Stage-0 is a root stage STAGE PLANS: - Stage: Stage-1 - Map Reduce - Map Operator Tree: - TableScan - alias: calendar - Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE - Select Operator - Statistics: Num rows: 3 Data size: 24 Basic stats: COMPLETE Column stats: COMPLETE - Group By Operator - aggregations: count(1) - mode: hash - outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - Reduce Output Operator - sort order: - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - value expressions: _col0 (type: bigint) - Reduce Operator Tree: - Group By Operator - aggregations: count(VALUE._col0) - mode: mergepartial - outputColumnNames: _col0 - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - File Output Operator - compressed: false - Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE - table: - input format: org.apache.hadoop.mapred.SequenceFileInputFormat - output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat - serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - Stage: Stage-0 Fetch Operator - limit: -1 + limit: 1 Processor Tree: ListSink diff --git a/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out b/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out index 216d3be..892c962 100644 --- a/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out +++ b/ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out @@ -84,6 +84,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + 
COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -132,6 +133,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -187,6 +189,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -235,6 +238,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -282,6 +286,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} #### A masked pattern was here #### numFiles 0 numRows 0 diff --git a/ql/src/test/results/clientpositive/create_like.q.out b/ql/src/test/results/clientpositive/create_like.q.out index 58d9879..ff2e752 100644 --- a/ql/src/test/results/clientpositive/create_like.q.out +++ b/ql/src/test/results/clientpositive/create_like.q.out @@ -354,6 +354,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} avro.schema.literal { \"namespace\": \"testing.hive.avro.serde\", \"name\": \"doctors\", diff --git a/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out b/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out index 703fa14..a7723a6 100644 --- a/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out +++ b/ql/src/test/results/clientpositive/describe_comment_nonascii.q.out @@ -53,6 +53,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} 
#### A masked pattern was here #### numFiles 0 numRows 0 diff --git a/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out b/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out index 48a86cf..e12357c 100644 --- a/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out +++ b/ql/src/test/results/clientpositive/llap/orc_predicate_pushdown.q.out @@ -141,11 +141,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hash(t) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) mode: hash @@ -199,11 +199,11 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hash(t) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) mode: hash @@ -337,14 +337,14 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean) - Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column 
stats: NONE Select Operator expressions: hash(t) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) mode: hash @@ -405,14 +405,14 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean) - Statistics: Num rows: 6037 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t < 0) and (UDFToInteger(t) > -2)) (type: boolean) - Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: hash(t) (type: int) outputColumnNames: _col0 - Statistics: Num rows: 670 Data size: 2680 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 116 Data size: 34409 Basic stats: COMPLETE Column stats: NONE Group By Operator aggregations: sum(_col0) mode: hash @@ -605,18 +605,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean) - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), s (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE Reduce Output 
Operator key expressions: _col0 (type: tinyint), _col1 (type: string) sort order: ++ - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -625,10 +625,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -671,18 +671,18 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean) - Statistics: Num rows: 232 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: (s is not null and (s like 'bob%') and (not (t) IN (-1, -2, -3)) and t BETWEEN 25 AND 30) (type: boolean) - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), s (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col0 (type: tinyint), _col1 (type: string) sort order: ++ - 
Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE Execution mode: llap LLAP IO: all inputs Reducer 2 @@ -691,10 +691,10 @@ STAGE PLANS: Select Operator expressions: KEY.reducesinkkey0 (type: tinyint), KEY.reducesinkkey1 (type: string) outputColumnNames: _col0, _col1 - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 6 Data size: 624 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 29 Data size: 8602 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -802,18 +802,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean) - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: 
COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Execution mode: llap @@ -824,13 +824,13 @@ STAGE PLANS: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -881,18 +881,18 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean) - Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((d >= 10.0) and (d < 12.0) and (s like '%son') and (t > 0) and si BETWEEN 300 AND 400 and (not (s like '%car%'))) (type: boolean) - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: 
COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Execution mode: llap @@ -903,13 +903,13 @@ STAGE PLANS: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1026,18 +1026,18 @@ STAGE PLANS: Map Operator Tree: TableScan alias: orc_pred - Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t > 10) and (t <> 101) and (d >= 10) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean) - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE 
Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Execution mode: llap @@ -1048,14 +1048,14 @@ STAGE PLANS: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Reducer 3 @@ -1064,13 +1064,13 @@ STAGE PLANS: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 
Data size: 296 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat @@ -1126,18 +1126,18 @@ STAGE PLANS: TableScan alias: orc_pred filterExpr: ((t > 10) and (t <> 101) and (d >= 10) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean) - Statistics: Num rows: 208 Data size: 24150 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1049 Data size: 311170 Basic stats: COMPLETE Column stats: NONE Filter Operator predicate: ((t > 10) and (t <> 101) and (d >= 10) and (d < 12.0) and (s like '%son') and (not (s like '%car%')) and (t > 0) and si BETWEEN 300 AND 400) (type: boolean) - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Select Operator expressions: t (type: tinyint), si (type: smallint), d (type: double), s (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 
(type: double) Execution mode: llap @@ -1148,14 +1148,14 @@ STAGE PLANS: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Reduce Output Operator key expressions: _col3 (type: string) sort order: - - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE TopN Hash Memory Usage: 0.1 value expressions: _col0 (type: tinyint), _col1 (type: smallint), _col2 (type: double) Reducer 3 @@ -1164,13 +1164,13 @@ STAGE PLANS: Select Operator expressions: VALUE._col0 (type: tinyint), VALUE._col1 (type: smallint), VALUE._col2 (type: double), KEY.reducesinkkey0 (type: string) outputColumnNames: _col0, _col1, _col2, _col3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE Limit Number of rows: 3 - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE File Output Operator compressed: false - Statistics: Num rows: 1 Data size: 116 Basic stats: COMPLETE Column stats: NONE + Statistics: Num rows: 1 Data size: 296 Basic stats: COMPLETE Column stats: NONE table: input format: org.apache.hadoop.mapred.SequenceFileInputFormat output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat diff --git 
a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out index aaf18a8..4105bbb 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_nonvec_table.q.out @@ -96,9 +96,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=99) + Select Operator [SEL_1] (rows=6 width=99) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=99) + TableScan [TS_0] (rows=6 width=99) default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -219,9 +219,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=99) + Select Operator [SEL_1] (rows=6 width=114) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=99) + TableScan [TS_0] (rows=6 width=114) default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -412,9 +412,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=422) + Select Operator [SEL_1] (rows=6 width=370) Output:["_col0","_col1","_col2","_col3","_col4"] - TableScan [TS_0] (rows=5 width=422) + TableScan [TS_0] (rows=6 width=370) default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"] PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double @@ -696,9 +696,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=164) + Select Operator [SEL_1] (rows=6 width=479) 
Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"] - TableScan [TS_0] (rows=5 width=164) + TableScan [TS_0] (rows=6 width=479) default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group @@ -867,9 +867,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=588) + Select Operator [SEL_1] (rows=6 width=752) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"] - TableScan [TS_0] (rows=5 width=588) + TableScan [TS_0] (rows=6 width=752) default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out index 0f0e1f7..38d6f0b 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_orc_vec_table.q.out @@ -96,9 +96,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator 
[SEL_3] (rows=5 width=99) + Select Operator [SEL_3] (rows=6 width=99) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=99) + TableScan [TS_0] (rows=6 width=99) default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -219,9 +219,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=99) + Select Operator [SEL_3] (rows=6 width=114) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=99) + TableScan [TS_0] (rows=6 width=114) default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -412,9 +412,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=422) + Select Operator [SEL_3] (rows=6 width=370) Output:["_col0","_col1","_col2","_col3","_col4"] - TableScan [TS_0] (rows=5 width=422) + TableScan [TS_0] (rows=6 width=370) default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"] PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double @@ -696,9 +696,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=164) + Select Operator [SEL_3] (rows=6 width=479) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"] - TableScan [TS_0] (rows=5 width=164) + TableScan [TS_0] (rows=6 width=479) 
default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group @@ -867,9 +867,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=588) + Select Operator [SEL_3] (rows=6 width=752) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"] - TableScan [TS_0] (rows=5 width=588) + TableScan [TS_0] (rows=6 width=752) default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out index 991dbfc..dc0896e 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_nonvec_table.q.out @@ -96,9 +96,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=20) + Select Operator [SEL_1] (rows=6 width=20) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=20) + TableScan [TS_0] (rows=6 width=20) 
default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -219,9 +219,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=20) + Select Operator [SEL_1] (rows=6 width=21) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=20) + TableScan [TS_0] (rows=6 width=21) default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -412,9 +412,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=90) + Select Operator [SEL_1] (rows=6 width=80) Output:["_col0","_col1","_col2","_col3","_col4"] - TableScan [TS_0] (rows=5 width=90) + TableScan [TS_0] (rows=6 width=80) default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"] PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double @@ -696,9 +696,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=151) + Select Operator [SEL_1] (rows=6 width=178) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"] - TableScan [TS_0] (rows=5 width=151) + TableScan [TS_0] (rows=6 width=178) default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"] PREHOOK: query: select 
insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group @@ -867,9 +867,9 @@ Stage-0 Stage-1 Map 1 llap File Output Operator [FS_2] - Select Operator [SEL_1] (rows=5 width=250) + Select Operator [SEL_1] (rows=6 width=249) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"] - TableScan [TS_0] (rows=5 width=250) + TableScan [TS_0] (rows=6 width=249) default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out index 30fa31b..68dce9c 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vec_table.q.out @@ -100,9 +100,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=20) + Select Operator [SEL_3] (rows=6 width=20) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=20) + TableScan [TS_0] (rows=6 width=20) default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -223,9 +223,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=20) + Select Operator [SEL_3] (rows=6 width=21) 
Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=20) + TableScan [TS_0] (rows=6 width=21) default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -416,9 +416,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=90) + Select Operator [SEL_3] (rows=6 width=80) Output:["_col0","_col1","_col2","_col3","_col4"] - TableScan [TS_0] (rows=5 width=90) + TableScan [TS_0] (rows=6 width=80) default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"] PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double @@ -700,9 +700,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=151) + Select Operator [SEL_3] (rows=6 width=178) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"] - TableScan [TS_0] (rows=5 width=151) + TableScan [TS_0] (rows=6 width=178) default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group @@ -871,9 +871,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=250) + Select Operator [SEL_3] (rows=6 width=249) 
Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"] - TableScan [TS_0] (rows=5 width=250) + TableScan [TS_0] (rows=6 width=249) default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group diff --git a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out index c3426dc..b2a139d 100644 --- a/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out +++ b/ql/src/test/results/clientpositive/llap/schema_evol_text_vecrow_table.q.out @@ -100,9 +100,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=20) + Select Operator [SEL_3] (rows=6 width=20) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=20) + TableScan [TS_0] (rows=6 width=20) default@table_add_int_permute_select,table_add_int_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ -223,9 +223,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=20) + Select Operator [SEL_3] (rows=6 width=21) Output:["_col0","_col1","_col2"] - TableScan [TS_0] (rows=5 width=20) + TableScan [TS_0] (rows=6 width=21) default@table_add_int_string_permute_select,table_add_int_string_permute_select,Tbl:COMPLETE,Col:NONE,Output:["insert_num","a","b"] PREHOOK: query: -- SELECT permutation columns to make sure NULL defaulting works right @@ 
-416,9 +416,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=90) + Select Operator [SEL_3] (rows=6 width=80) Output:["_col0","_col1","_col2","_col3","_col4"] - TableScan [TS_0] (rows=5 width=90) + TableScan [TS_0] (rows=6 width=80) default@table_change_string_group_double,table_change_string_group_double,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","b"] PREHOOK: query: select insert_num,c1,c2,c3,b from table_change_string_group_double @@ -700,9 +700,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=151) + Select Operator [SEL_3] (rows=6 width=178) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16","_col17","_col18","_col19","_col20","_col21"] - TableScan [TS_0] (rows=5 width=151) + TableScan [TS_0] (rows=6 width=178) default@table_change_numeric_group_string_group_multi_ints_string_group,table_change_numeric_group_string_group_multi_ints_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","c16","c17","c18","c19","c20","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c17,c18,c19,c20,b from table_change_numeric_group_string_group_multi_ints_string_group @@ -871,9 +871,9 @@ Stage-0 Stage-1 Map 1 vectorized, llap File Output Operator [FS_4] - Select Operator [SEL_3] (rows=5 width=250) + Select Operator [SEL_3] (rows=6 width=249) Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7","_col8","_col9","_col10","_col11","_col12","_col13","_col14","_col15","_col16"] - TableScan [TS_0] (rows=5 width=250) + TableScan [TS_0] (rows=6 width=249) 
default@table_change_numeric_group_string_group_floating_string_group,table_change_numeric_group_string_group_floating_string_group,Tbl:COMPLETE,Col:NONE,Output:["insert_num","c1","c2","c3","c4","c5","c6","c7","c8","c9","c10","c11","c12","c13","c14","c15","b"] PREHOOK: query: select insert_num,c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,b from table_change_numeric_group_string_group_floating_string_group diff --git a/ql/src/test/results/clientpositive/show_tblproperties.q.out b/ql/src/test/results/clientpositive/show_tblproperties.q.out index e1c6670..953a0a4 100644 --- a/ql/src/test/results/clientpositive/show_tblproperties.q.out +++ b/ql/src/test/results/clientpositive/show_tblproperties.q.out @@ -36,6 +36,7 @@ PREHOOK: query: show tblproperties tmpfoo PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: show tblproperties tmpfoo POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bar bar value #### A masked pattern was here #### numFiles 0 @@ -53,6 +54,7 @@ PREHOOK: query: show tblproperties default.tmpfoo PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: show tblproperties default.tmpfoo POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bar bar value #### A masked pattern was here #### numFiles 0 @@ -108,6 +110,7 @@ PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: -- from db1 to default db show tblproperties default.tmpfoo POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bar bar value #### A masked pattern was here #### numFiles 0 @@ -127,6 +130,7 @@ PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: -- from db1 to db1 show tblproperties tmpfoo POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} bar bar value1 #### A masked pattern was here #### numFiles 0 @@ -152,6 +156,7 @@ PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: -- from default to db1 show tblproperties db1.tmpfoo POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true"} bar bar value1 #### A masked pattern was here #### numFiles 0 diff --git a/ql/src/test/results/clientpositive/stats_invalidation.q.out b/ql/src/test/results/clientpositive/stats_invalidation.q.out index d822f4f..a0e7663 100644 --- a/ql/src/test/results/clientpositive/stats_invalidation.q.out +++ b/ql/src/test/results/clientpositive/stats_invalidation.q.out @@ -88,6 +88,7 @@ Retention: 0 #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"key\":\"true\",\"value\":\"true\"}} #### A masked pattern was here #### numFiles 1 numRows 500 diff --git a/ql/src/test/results/clientpositive/unset_table_view_property.q.out b/ql/src/test/results/clientpositive/unset_table_view_property.q.out index a3dec73..3e1b14f 100644 --- a/ql/src/test/results/clientpositive/unset_table_view_property.q.out +++ b/ql/src/test/results/clientpositive/unset_table_view_property.q.out @@ -36,6 +36,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} a 1 c 3 #### A masked pattern was here #### @@ -58,6 +59,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -76,6 +78,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} a 1 c 3 d 4 @@ -99,6 +102,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE 
{"BASIC_STATS":"true"} c 3 #### A masked pattern was here #### numFiles 0 @@ -120,6 +124,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} #### A masked pattern was here #### numFiles 0 numRows 0 @@ -138,6 +143,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} a 1 b 2 c 3 @@ -162,6 +168,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} a 1 c 3 #### A masked pattern was here #### @@ -184,6 +191,7 @@ PREHOOK: query: SHOW TBLPROPERTIES vt.testTable PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: SHOW TBLPROPERTIES vt.testTable POSTHOOK: type: SHOW_TBLPROPERTIES +COLUMN_STATS_ACCURATE {"BASIC_STATS":"true"} a 1 #### A masked pattern was here #### numFiles 0