diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java index 4aea152..850e112 100644 --- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java +++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java @@ -292,11 +292,14 @@ public static boolean requireCalStats(Configuration hiveConf, Partition oldPart, return true; } - if (environmentContext != null - && environmentContext.isSetProperties() - && StatsSetupConst.TASK.equals(environmentContext.getProperties().get( - StatsSetupConst.STATS_GENERATED))) { - return true; + if (environmentContext != null && environmentContext.isSetProperties()) { + String statsType = environmentContext.getProperties().get(StatsSetupConst.STATS_GENERATED); + // whether STATS_GENERATED is USER or TASK, the stats need to be re-calculated: + // USER: alter table .. update statistics + // TASK: from some sql operation which could collect and compute stats + if (StatsSetupConst.TASK.equals(statsType) || StatsSetupConst.USER.equals(statsType)) { + return true; + } } // requires to calculate stats if new and old have different fast stats diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java index a1fb874..209ae6d 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java @@ -1171,10 +1171,12 @@ private int touch(Hive db, AlterTableSimpleDesc touchDesc) throws HiveException { Table tbl = db.getTable(touchDesc.getTableName()); + EnvironmentContext environmentContext = new EnvironmentContext(); + environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); if (touchDesc.getPartSpec() == null) { try { - db.alterTable(touchDesc.getTableName(), tbl, null); + db.alterTable(touchDesc.getTableName(), tbl, environmentContext); } catch 
(InvalidOperationException e) { throw new HiveException("Uable to update table"); } @@ -1186,7 +1188,7 @@ private int touch(Hive db, AlterTableSimpleDesc touchDesc) throw new HiveException("Specified partition does not exist"); } try { - db.alterPartition(touchDesc.getTableName(), part, null); + db.alterPartition(touchDesc.getTableName(), part, environmentContext); } catch (InvalidOperationException e) { throw new HiveException(e); } @@ -3493,6 +3495,16 @@ private boolean isSchemaEvolutionEnabled(Table tbl) { private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Partition part) throws HiveException { + EnvironmentContext environmentContext = alterTbl.getEnvironmentContext(); + if (environmentContext == null) { + environmentContext = new EnvironmentContext(); + alterTbl.setEnvironmentContext(environmentContext); + } + // do not need update stats in alter table/partition operations + if (environmentContext.getProperties() == null || + environmentContext.getProperties().get(StatsSetupConst.DO_NOT_UPDATE_STATS) == null) { + environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE); + } if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.RENAME) { tbl.setDbName(Utilities.getDatabaseName(alterTbl.getNewName())); @@ -3630,6 +3642,14 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part } sd.setCols(alterTbl.getNewCols()); } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDPROPS) { + // STATS_GENERATED==StatsSetupConst.USER is set only when the stats row_count, raw_data_size + // are updated using alter table .. update statistics .. But the stats parameters including + // totalSize etc could also be set using command like alter table .. 
set tblproperties, in this + case, STATS_GENERATED will not be set, so we check hasStatsInParameters(alterTbl.getProps()) + here, which also covers the case where STATS_GENERATED==StatsSetupConst.USER + if (hasStatsInParameters(alterTbl.getProps()) && environmentContext.getProperties() != null) { + environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS); + } if (part != null) { part.getTPartition().getParameters().putAll(alterTbl.getProps()); } else { @@ -3637,6 +3657,10 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part } } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.DROPPROPS) { Iterator keyItr = alterTbl.getProps().keySet().iterator(); + if (hasStatsInParameters(alterTbl.getProps()) && environmentContext.isSetProperties()) { + // drop a stats parameter, which triggers recompute stats update automatically + environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS); + } while (keyItr.hasNext()) { if (part != null) { part.getTPartition().getParameters().remove(keyItr.next()); } else { @@ -3730,6 +3754,9 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part } catch (URISyntaxException e) { throw new HiveException(e); } + if (environmentContext.isSetProperties()) { + environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS); + } } else if (alterTbl.getOp() == AlterTableDesc.AlterTableTypes.ADDSKEWEDBY) { // Validation's been done at compile time. no validation is needed here. 
List skewedColNames = null; @@ -3775,6 +3802,9 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part throw new HiveException(e); } } + if (environmentContext.isSetProperties()) { + environmentContext.getProperties().remove(StatsSetupConst.DO_NOT_UPDATE_STATS); + } } else if (alterTbl.getOp() == AlterTableTypes.ALTERBUCKETNUM) { if (part != null) { if (part.getBucketCount() == alterTbl.getNumberBuckets()) { @@ -3794,6 +3824,20 @@ private int alterTableOrSinglePartition(AlterTableDesc alterTbl, Table tbl, Part return 0; } + private boolean hasStatsInParameters(Map params) { + // the table stats are stored as the table parameters, so alter table set properties + // could set or update the table stats + if (params == null) { + return false; + } + for (String stats : StatsSetupConst.supportedStats) { + if (params.containsKey(stats)) { + return true; + } + } + return false; + } + private int dropConstraint(Hive db, AlterTableDesc alterTbl) throws SemanticException, HiveException { try { diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java index 0f472e7..bfdaae7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java @@ -1381,7 +1381,7 @@ private void analyzeAlterTableProps(String[] qualified, HashMap } } else { throw new SemanticException("AlterTable UpdateStats " + entry.getKey() - + " failed because the only valid keys are" + StatsSetupConst.ROW_COUNT + " and " + + " failed because the only valid keys are " + StatsSetupConst.ROW_COUNT + " and " + StatsSetupConst.RAW_DATA_SIZE); } } diff --git a/ql/src/test/queries/clientpositive/alter_table_stats_status.q b/ql/src/test/queries/clientpositive/alter_table_stats_status.q new file mode 100644 index 0000000..8e07b81 --- /dev/null +++ b/ql/src/test/queries/clientpositive/alter_table_stats_status.q 
@@ -0,0 +1,48 @@ +create database statsdb; +use statsdb; +create table srctable like default.src; +load data local inpath '../../data/files/kv1.txt' overwrite into table srctable; + +analyze table srctable compute statistics; +describe formatted srctable; + +alter table srctable touch; +alter table srctable rename to statstable; + +alter table statstable add columns (newcol string); +alter table statstable change key key string; +alter table statstable set tblproperties('testtblstats'='unchange'); +describe formatted statstable; + +alter table statstable update statistics set ('numRows' = '1000'); +describe formatted statstable; + +analyze table statstable compute statistics; +describe formatted statstable; +alter table statstable set location '${system:test.tmp.dir}/newdir'; +describe formatted statstable; + +drop table statstable; + +create table srcpart like default.srcpart; +load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11'); +load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12'); + +analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics; +describe formatted srcpart partition (ds='2008-04-08', hr='11'); + +alter table srcpart touch; +alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11'); +alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string); +alter table srcpart partition (ds='2017-01-19', hr='11') change key key string; +alter table srcpart set tblproperties('testpartstats'='unchange'); +describe formatted srcpart partition (ds='2017-01-19', hr='11'); + +alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000'); +describe formatted srcpart partition (ds='2017-01-19', hr='11'); + +analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics; +describe formatted srcpart 
partition (ds='2017-01-19', hr='11'); + +drop table srcpart; + diff --git a/ql/src/test/results/clientpositive/alter_table_stats_status.q.out b/ql/src/test/results/clientpositive/alter_table_stats_status.q.out new file mode 100644 index 0000000..3404f88 --- /dev/null +++ b/ql/src/test/results/clientpositive/alter_table_stats_status.q.out @@ -0,0 +1,572 @@ +PREHOOK: query: create database statsdb +PREHOOK: type: CREATEDATABASE +PREHOOK: Output: database:statsdb +POSTHOOK: query: create database statsdb +POSTHOOK: type: CREATEDATABASE +POSTHOOK: Output: database:statsdb +PREHOOK: query: use statsdb +PREHOOK: type: SWITCHDATABASE +PREHOOK: Input: database:statsdb +POSTHOOK: query: use statsdb +POSTHOOK: type: SWITCHDATABASE +POSTHOOK: Input: database:statsdb +PREHOOK: query: create table srctable like default.src +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:statsdb +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: create table srctable like default.src +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:statsdb +POSTHOOK: Output: statsdb@srctable +PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srctable +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srctable +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: statsdb@srctable +PREHOOK: query: analyze table srctable compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: statsdb@srctable +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: analyze table srctable compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: statsdb@srctable +POSTHOOK: Output: statsdb@srctable +PREHOOK: query: describe formatted srctable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srctable +POSTHOOK: query: describe formatted srctable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srctable +# col_name data_type 
comment + +key string default +value string default + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: alter table srctable touch +PREHOOK: type: ALTERTABLE_TOUCH +PREHOOK: Input: statsdb@srctable +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: alter table srctable touch +POSTHOOK: type: ALTERTABLE_TOUCH +POSTHOOK: Input: statsdb@srctable +POSTHOOK: Output: statsdb@srctable +PREHOOK: query: alter table srctable rename to statstable +PREHOOK: type: ALTERTABLE_RENAME +PREHOOK: Input: statsdb@srctable +PREHOOK: Output: statsdb@srctable +POSTHOOK: query: alter table srctable rename to statstable +POSTHOOK: type: ALTERTABLE_RENAME +POSTHOOK: Input: statsdb@srctable +POSTHOOK: Output: statsdb@srctable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: alter table statstable add columns (newcol string) +PREHOOK: type: ALTERTABLE_ADDCOLS +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: alter table statstable add columns (newcol string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: alter table statstable change key key string +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: alter table statstable change key key string +POSTHOOK: type: 
ALTERTABLE_RENAMECOL +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: alter table statstable set tblproperties('testtblstats'='unchange') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: alter table statstable set tblproperties('testtblstats'='unchange') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: describe formatted statstable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@statstable +POSTHOOK: query: describe formatted statstable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@statstable +# col_name data_type comment + +key string default +value string default +newcol string + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 1 + numRows 500 + rawDataSize 5312 + testtblstats unchange + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: alter table statstable update statistics set ('numRows' = '1000') +PREHOOK: type: ALTERTABLE_UPDATETABLESTATS +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: alter table statstable update statistics set ('numRows' = '1000') +POSTHOOK: type: ALTERTABLE_UPDATETABLESTATS +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: describe formatted statstable +PREHOOK: 
type: DESCTABLE +PREHOOK: Input: statsdb@statstable +POSTHOOK: query: describe formatted statstable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@statstable +# col_name data_type comment + +key string default +value string default +newcol string + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: +#### A masked pattern was here #### + numFiles 1 + numRows 1000 + rawDataSize 5312 + testtblstats unchange + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: analyze table statstable compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: analyze table statstable compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: describe formatted statstable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@statstable +POSTHOOK: query: describe formatted statstable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@statstable +# col_name data_type comment + +key string default +value string default +newcol string + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 1 + numRows 500 + rawDataSize 5312 + testtblstats unchange + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe 
Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +#### A masked pattern was here #### +PREHOOK: type: ALTERTABLE_LOCATION +PREHOOK: Input: statsdb@statstable +#### A masked pattern was here #### +PREHOOK: Output: statsdb@statstable +#### A masked pattern was here #### +POSTHOOK: type: ALTERTABLE_LOCATION +POSTHOOK: Input: statsdb@statstable +#### A masked pattern was here #### +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: describe formatted statstable +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@statstable +POSTHOOK: query: describe formatted statstable +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@statstable +# col_name data_type comment + +key string default +value string default +newcol string + +# Detailed Table Information +Database: statsdb +#### A masked pattern was here #### +Retention: 0 +#### A masked pattern was here #### +Table Type: MANAGED_TABLE +Table Parameters: +#### A masked pattern was here #### + numFiles 0 + numRows 500 + rawDataSize 5312 + testtblstats unchange + totalSize 0 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: drop table statstable +PREHOOK: type: DROPTABLE +PREHOOK: Input: statsdb@statstable +PREHOOK: Output: statsdb@statstable +POSTHOOK: query: drop table statstable +POSTHOOK: type: DROPTABLE +POSTHOOK: Input: statsdb@statstable +POSTHOOK: Output: statsdb@statstable +PREHOOK: query: create table srcpart like 
default.srcpart +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:statsdb +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: create table srcpart like default.srcpart +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:statsdb +POSTHOOK: Output: statsdb@srcpart +PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11') +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='11') +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +PREHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12') +PREHOOK: type: LOAD +#### A masked pattern was here #### +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: load data local inpath '../../data/files/kv1.txt' overwrite into table srcpart partition (ds='2008-04-08', hr='12') +POSTHOOK: type: LOAD +#### A masked pattern was here #### +POSTHOOK: Output: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=12 +PREHOOK: query: analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: statsdb@srcpart +PREHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11 +PREHOOK: Output: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: query: analyze table srcpart partition (ds='2008-04-08', hr='11') compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +PREHOOK: query: describe formatted srcpart partition (ds='2008-04-08', hr='11') +PREHOOK: type: DESCTABLE +PREHOOK: 
Input: statsdb@srcpart +POSTHOOK: query: describe formatted srcpart partition (ds='2008-04-08', hr='11') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srcpart +# col_name data_type comment + +key string default +value string default + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2008-04-08, 11] +Database: statsdb +Table: srcpart +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: alter table srcpart touch +PREHOOK: type: ALTERTABLE_TOUCH +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: alter table srcpart touch +POSTHOOK: type: ALTERTABLE_TOUCH +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart +PREHOOK: query: alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11') +PREHOOK: type: ALTERTABLE_RENAMEPART +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: query: alter table srcpart partition (ds='2008-04-08', hr='11') rename to partition (ds='2017-01-19', hr='11') +POSTHOOK: type: ALTERTABLE_RENAMEPART +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2008-04-08/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string) +PREHOOK: type: 
ALTERTABLE_ADDCOLS +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') add columns (newcol string) +POSTHOOK: type: ALTERTABLE_ADDCOLS +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') change key key string +PREHOOK: type: ALTERTABLE_RENAMECOL +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') change key key string +POSTHOOK: type: ALTERTABLE_RENAMECOL +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: alter table srcpart set tblproperties('testpartstats'='unchange') +PREHOOK: type: ALTERTABLE_PROPERTIES +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: alter table srcpart set tblproperties('testpartstats'='unchange') +POSTHOOK: type: ALTERTABLE_PROPERTIES +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart +PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srcpart +POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srcpart +# col_name data_type comment + +key string default +value string default +newcol string + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2017-01-19, 11] +Database: statsdb +Table: srcpart +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 1 + numRows 500 
+ rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000') +PREHOOK: type: ALTERTABLE_UPDATEPARTSTATS +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: query: alter table srcpart partition (ds='2017-01-19', hr='11') update statistics set ('numRows' = '1000') +POSTHOOK: type: ALTERTABLE_UPDATEPARTSTATS +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srcpart +POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srcpart +# col_name data_type comment + +key string default +value string default +newcol string + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2017-01-19, 11] +Database: statsdb +Table: srcpart +#### A masked pattern was here #### +Partition Parameters: +#### A masked pattern was here #### + numFiles 1 + numRows 1000 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] 
+Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics +PREHOOK: type: QUERY +PREHOOK: Input: statsdb@srcpart +PREHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: Output: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: query: analyze table srcpart partition (ds='2017-01-19', hr='11') compute statistics +POSTHOOK: type: QUERY +POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Input: statsdb@srcpart@ds=2017-01-19/hr=11 +POSTHOOK: Output: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart@ds=2017-01-19/hr=11 +PREHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +PREHOOK: type: DESCTABLE +PREHOOK: Input: statsdb@srcpart +POSTHOOK: query: describe formatted srcpart partition (ds='2017-01-19', hr='11') +POSTHOOK: type: DESCTABLE +POSTHOOK: Input: statsdb@srcpart +# col_name data_type comment + +key string default +value string default +newcol string + +# Partition Information +# col_name data_type comment + +ds string +hr string + +# Detailed Partition Information +Partition Value: [2017-01-19, 11] +Database: statsdb +Table: srcpart +#### A masked pattern was here #### +Partition Parameters: + COLUMN_STATS_ACCURATE {\"BASIC_STATS\":\"true\"} +#### A masked pattern was here #### + numFiles 1 + numRows 500 + rawDataSize 5312 + totalSize 5812 +#### A masked pattern was here #### + +# Storage Information +SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe +InputFormat: org.apache.hadoop.mapred.TextInputFormat +OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat +Compressed: No +Num Buckets: -1 +Bucket Columns: [] +Sort Columns: [] +Storage Desc Params: + serialization.format 1 +PREHOOK: query: drop table srcpart +PREHOOK: type: DROPTABLE +PREHOOK: Input: statsdb@srcpart +PREHOOK: Output: statsdb@srcpart +POSTHOOK: query: drop table srcpart +POSTHOOK: type: DROPTABLE 
+POSTHOOK: Input: statsdb@srcpart +POSTHOOK: Output: statsdb@srcpart