Index: hbase-handler/src/test/results/positive/hbase_stats2.q.out =================================================================== --- hbase-handler/src/test/results/positive/hbase_stats2.q.out (revision 1535192) +++ hbase-handler/src/test/results/positive/hbase_stats2.q.out (working copy) @@ -43,7 +43,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 @@ -175,11 +174,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information @@ -345,11 +339,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information Index: hbase-handler/src/test/results/positive/hbase_stats.q.out =================================================================== --- hbase-handler/src/test/results/positive/hbase_stats.q.out (revision 1535192) +++ hbase-handler/src/test/results/positive/hbase_stats.q.out (working copy) @@ -43,7 +43,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 @@ -175,11 +174,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information @@ -345,11 +339,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information Index: hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java =================================================================== --- 
hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java (revision 1535192) +++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java (working copy) @@ -25,7 +25,7 @@ import java.util.Map; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; +import org.apache.hadoop.hive.common.StatsSetupConst; Index: metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java =================================================================== --- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (revision 1535192) +++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (working copy) @@ -467,7 +467,7 @@ } private static Partition makePartitionObject(String dbName, String tblName, - List ptnVals, Table tbl, String ptnLocationSuffix) { + List ptnVals, Table tbl, String ptnLocationSuffix) throws MetaException { Partition part4 = new Partition(); part4.setDbName(dbName); part4.setTableName(tblName); @@ -476,6 +476,7 @@ part4.setSd(tbl.getSd().deepCopy()); part4.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo().deepCopy()); part4.getSd().setLocation(tbl.getSd().getLocation() + ptnLocationSuffix); + MetaStoreUtils.updatePartitionStatsFast(part4, warehouse); return part4; } Index: metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (revision 1535192) +++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (working copy) @@ -41,13 +41,17 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; +import 
org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; @@ -155,6 +159,174 @@ } /** + * @param partParams + * @return True if the passed Parameters Map contains values for all "Fast Stats". + */ + public static boolean containsAllFastStats(Map partParams) { + List fastStats = StatsSetupConst.getStatsNoCollection(); + for (String stat : fastStats) { + if (!partParams.containsKey(stat)) { + return false; + } + } + return true; + } + + public static boolean updateUnpartitionedTableStatsFast(Database db, Table tbl, Warehouse wh) + throws MetaException { + return updateUnpartitionedTableStatsFast(db, tbl, wh, false, false); + } + + public static boolean updateUnpartitionedTableStatsFast(Database db, Table tbl, Warehouse wh, + boolean madeDir) throws MetaException { + return updateUnpartitionedTableStatsFast(db, tbl, wh, madeDir, false); + } + + /** + * Updates the numFiles and totalSize parameters for the passed unpartitioned Table by querying + * the warehouse if the passed Table does not already have values for these parameters. 
+ * @param db + * @param tbl + * @param wh + * @param madeDir if true, the directory was just created and can be assumed to be empty + * @param forceRecompute Recompute stats even if the passed Table already has + * these parameters set + * @return true if the stats were updated, false otherwise + */ + public static boolean updateUnpartitionedTableStatsFast(Database db, Table tbl, Warehouse wh, + boolean madeDir, boolean forceRecompute) throws MetaException { + Map params = tbl.getParameters(); + boolean updated = false; + if (forceRecompute || + params == null || + !containsAllFastStats(params)) { + if (params == null) { + params = new HashMap(); + } + if (!madeDir) { + // The table location already exists and may contain data. + // Let's try to populate those stats that don't require full scan. + LOG.info("Updating table stats fast for " + tbl.getTableName()); + FileStatus[] fileStatus = wh.getFileStatusesForUnpartitionedTable(db, tbl); + params.put(StatsSetupConst.NUM_FILES, Integer.toString(fileStatus.length)); + long tableSize = 0L; + for (int i = 0; i < fileStatus.length; i++) { + tableSize += fileStatus[i].getLen(); + } + params.put(StatsSetupConst.TOTAL_SIZE, Long.toString(tableSize)); + LOG.info("Updated size of table " + tbl.getTableName() +" to "+ Long.toString(tableSize)); + if (params.containsKey(StatsSetupConst.ROW_COUNT) || + params.containsKey(StatsSetupConst.RAW_DATA_SIZE)) { + // TODO: Add a MetaStore flag indicating accuracy of these stats and update it here. 
+ } + } + tbl.setParameters(params); + updated = true; + } + return updated; + } + + private static boolean doFastStatsExist(Map parameters) { + for (String stat : StatsSetupConst.getStatsNoCollection()) { + if (!parameters.containsKey(stat)) { + return false; + } + } + return true; + } + + public static boolean requireCalStats(Configuration hiveConf, Partition oldPart, + Partition newPart, Table tbl) { + + if (!HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER)) { + return false; + } + + if (MetaStoreUtils.isView(tbl)) { + return false; + } + + if (oldPart == null && newPart == null) { + return true; + } + + // requires to calculate stats if new partition doesn't have it + if ((newPart == null) || (newPart.getParameters() == null) + || !doFastStatsExist(newPart.getParameters())) { + return true; + } + + // requires to calculate stats if new and old have different stats + if ((oldPart != null) && (oldPart.getParameters() != null)) { + for (String stat : StatsSetupConst.getStatsNoCollection()) { + if (oldPart.getParameters().containsKey(stat)) { + Long oldStat = Long.parseLong(oldPart.getParameters().get(stat)); + Long newStat = Long.parseLong(newPart.getParameters().get(stat)); + if (oldStat != newStat) { + return true; + } + } + } + } + return false; + } + + public static boolean updatePartitionStatsFast(Partition part, Warehouse wh) + throws MetaException { + return updatePartitionStatsFast(part, wh, false, false); + } + + public static boolean updatePartitionStatsFast(Partition part, Warehouse wh, boolean madeDir) + throws MetaException { + return updatePartitionStatsFast(part, wh, madeDir, false); + } + + /** + * Updates the numFiles and totalSize parameters for the passed Partition by querying + * the warehouse if the passed Partition does not already have values for these parameters. 
+ * @param part + * @param wh + * @param madeDir if true, the directory was just created and can be assumed to be empty + * @param forceRecompute Recompute stats even if the passed Partition already has + * these parameters set + * @return true if the stats were updated, false otherwise + */ + public static boolean updatePartitionStatsFast(Partition part, Warehouse wh, + boolean madeDir, boolean forceRecompute) throws MetaException { + Map params = part.getParameters(); + boolean updated = false; + if (forceRecompute || + params == null || + !containsAllFastStats(params)) { + if (params == null) { + params = new HashMap(); + } + if (!madeDir) { + // The partition location already existed and may contain data. Let's try to + // populate those statistics that don't require a full scan of the data. + LOG.warn("Updating partition stats fast for: " + part.getTableName()); + FileStatus[] fileStatus = wh.getFileStatusesForPartition(part); + params.put(StatsSetupConst.NUM_FILES, Integer.toString(fileStatus.length)); + long partSize = 0L; + for (int i = 0; i < fileStatus.length; i++) { + partSize += fileStatus[i].getLen(); + } + params.put(StatsSetupConst.TOTAL_SIZE, Long.toString(partSize)); + LOG.warn("Updated size to " + Long.toString(partSize)); + if (params.containsKey(StatsSetupConst.ROW_COUNT) || + params.containsKey(StatsSetupConst.RAW_DATA_SIZE)) { + // The accuracy of these "collectable" stats at this point is suspect unless we know that + // StatsTask was just run before this MetaStore call and populated them. + // TODO: Add a MetaStore flag indicating accuracy of these stats and update it here. + } + } + part.setParameters(params); + updated = true; + } + return updated; + } + + /** + * getDeserializer + * + * Get the Deserializer for a table given its name and properties. 
@@ -1136,6 +1308,13 @@ return filter.toString(); } + public static boolean isView(Table table) { + if (table == null) { + return false; + } + return TableType.VIRTUAL_VIEW.toString().equals(table.getTableType()); + } + /** * create listener instances as per the configuration. * Index: metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (revision 1535192) +++ metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (working copy) @@ -45,11 +45,15 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.SkewedInfo; +import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ReflectionUtils; @@ -496,6 +500,63 @@ } /** + * @param partn + * @return array of FileStatus objects corresponding to the files making up the passed partition + */ + public FileStatus[] getFileStatusesForPartition(Partition partn) + throws MetaException { + try { + Path path = new Path(partn.getSd().getLocation()); + FileSystem fileSys = path.getFileSystem(conf); + /* consider sub-directory created from list bucketing. 
*/ + int listBucketingDepth = calculateListBucketingDMLDepth(partn); + return HiveStatsUtils.getFileStatusRecurse(path, (1 + listBucketingDepth), fileSys); + } catch (IOException ioe) { + MetaStoreUtils.logAndThrowMetaException(ioe); + } + return null; + } + + /** + * List bucketing will introduce sub-directories. + * calculate it here in order to go to the leaf directory + * so that we can count right number of files. + * @param partn + * @return + */ + private static int calculateListBucketingDMLDepth(Partition partn) { + // list bucketing will introduce more files + int listBucketingDepth = 0; + SkewedInfo skewedInfo = partn.getSd().getSkewedInfo(); + if ((skewedInfo != null) && (skewedInfo.getSkewedColNames() != null) + && (skewedInfo.getSkewedColNames().size() > 0) + && (skewedInfo.getSkewedColValues() != null) + && (skewedInfo.getSkewedColValues().size() > 0) + && (skewedInfo.getSkewedColValueLocationMaps() != null) + && (skewedInfo.getSkewedColValueLocationMaps().size() > 0)) { + listBucketingDepth = skewedInfo.getSkewedColNames().size(); + } + return listBucketingDepth; + } + + /** + * @param table + * @return array of FileStatus objects corresponding to the files making up the passed + * unpartitioned table + */ + public FileStatus[] getFileStatusesForUnpartitionedTable(Database db, Table table) + throws MetaException { + Path tablePath = getTablePath(db, table.getTableName()); + try { + FileSystem fileSys = tablePath.getFileSystem(conf); + return HiveStatsUtils.getFileStatusRecurse(tablePath, 1, fileSys); + } catch (IOException ioe) { + MetaStoreUtils.logAndThrowMetaException(ioe); + } + return null; + } + + /** * Makes a valid partition name. 
* @param partCols The partition columns * @param vals The partition values Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 1535192) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy) @@ -1032,7 +1032,8 @@ ms.openTransaction(); - if (ms.getDatabase(tbl.getDbName()) == null) { + Database db = ms.getDatabase(tbl.getDbName()); + if (db == null) { throw new NoSuchObjectException("The database " + tbl.getDbName() + " does not exist"); } @@ -1066,6 +1067,14 @@ madeDir = true; } } + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl)) { + if (tbl.getPartitionKeysSize() == 0) { // Unpartitioned table + MetaStoreUtils.updateUnpartitionedTableStatsFast(db, tbl, wh, madeDir); + } else { // Partitioned table with no partitions. + MetaStoreUtils.updateUnpartitionedTableStatsFast(db, tbl, wh, true); + } + } // set create time long time = System.currentTimeMillis() / 1000; @@ -1543,6 +1552,11 @@ part.setCreateTime((int) time); part.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time)); + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl)) { + MetaStoreUtils.updatePartitionStatsFast(part, wh, madeDir); + } + success = ms.addPartition(part); if (success) { success = ms.commitTransaction(); @@ -1763,6 +1777,11 @@ } } + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl)) { + MetaStoreUtils.updatePartitionStatsFast(part, wh, madeDir); + } + // set create time long time = System.currentTimeMillis() / 1000; part.setCreateTime((int) time); Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java =================================================================== --- 
metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (revision 1535192) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (working copy) @@ -31,6 +31,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; +import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; @@ -195,6 +196,12 @@ msdb.alterPartition(dbname, name, part.getValues(), part); } } + } else if (MetaStoreUtils.requireCalStats(hiveConf, null, null, newt) && + (newt.getPartitionKeysSize() == 0)) { + Database db = msdb.getDatabase(newt.getDbName()); + // Update table stats. For partitioned table, we update stats in + // alterPartition() + MetaStoreUtils.updateUnpartitionedTableStatsFast(db, newt, wh, false, true); } // now finally call alter table msdb.alterTable(dbname, name, newt); @@ -254,10 +261,10 @@ Path destPath = null; FileSystem srcFs = null; FileSystem destFs = null; - Table tbl = null; Partition oldPart = null; String oldPartLoc = null; String newPartLoc = null; + // Set DDL time to now if not specified if (new_part.getParameters() == null || new_part.getParameters().get(hive_metastoreConstants.DDL_TIME) == null || @@ -265,10 +272,15 @@ new_part.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(System .currentTimeMillis() / 1000)); } + + Table tbl = msdb.getTable(dbname, name); //alter partition if (part_vals == null || part_vals.size() == 0) { try { oldPart = msdb.getPartition(dbname, name, new_part.getValues()); + if (MetaStoreUtils.requireCalStats(hiveConf, oldPart, new_part, tbl)) { + MetaStoreUtils.updatePartitionStatsFast(new_part, wh, false, true); + } msdb.alterPartition(dbname, name, new_part.getValues(), new_part); } catch 
(InvalidObjectException e) { throw new InvalidOperationException("alter is not possible"); @@ -299,7 +311,6 @@ throw new AlreadyExistsException("Partition already exists:" + dbname + "." + name + "." + new_part.getValues()); } - tbl = msdb.getTable(dbname, name); if (tbl == null) { throw new InvalidObjectException( "Unable to rename partition because table or database do not exist"); @@ -351,6 +362,9 @@ + tbl.getTableName() + " " + new_part.getValues()); } new_part.getSd().setLocation(newPartLoc); + if (MetaStoreUtils.requireCalStats(hiveConf, oldPart, new_part, tbl)) { + MetaStoreUtils.updatePartitionStatsFast(new_part, wh, false, true); + } msdb.alterPartition(dbname, name, part_vals, new_part); } } @@ -399,6 +413,7 @@ MetaException { List oldParts = new ArrayList(); List> partValsList = new ArrayList>(); + Table tbl = msdb.getTable(dbname, name); try { for (Partition tmpPart: new_parts) { // Set DDL time to now if not specified @@ -408,9 +423,14 @@ tmpPart.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(System .currentTimeMillis() / 1000)); } + Partition oldTmpPart = msdb.getPartition(dbname, name, tmpPart.getValues()); oldParts.add(oldTmpPart); partValsList.add(tmpPart.getValues()); + + if (MetaStoreUtils.requireCalStats(hiveConf, oldTmpPart, tmpPart, tbl)) { + MetaStoreUtils.updatePartitionStatsFast(tmpPart, wh, false, true); + } } msdb.alterPartitions(dbname, name, partValsList, new_parts); } catch (InvalidObjectException e) { Index: ql/src/test/results/clientnegative/unset_table_property.q.out =================================================================== --- ql/src/test/results/clientnegative/unset_table_property.q.out (revision 1535192) +++ ql/src/test/results/clientnegative/unset_table_property.q.out (working copy) @@ -16,11 +16,13 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here 
#### +totalSize 0 FAILED: SemanticException [Error 10215]: Please use the following syntax if not sure whether the property existed or not: ALTER TABLE tableName UNSET TBLPROPERTIES IF EXISTS (key1, key2, ...) The following property z does not exist in testtable Index: ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out =================================================================== --- ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out (revision 1535192) +++ ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out (working copy) @@ -71,6 +71,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5293 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/merge4.q.out =================================================================== --- ql/src/test/results/clientpositive/merge4.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/merge4.q.out (working copy) @@ -2994,14 +2994,14 @@ POSTHOOK: Output: default@nzhang_part@ds=2010-08-15/hr=file, POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, 
comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: show partitions nzhang_part @@ -3010,14 +3010,14 @@ POSTHOOK: type: SHOWPARTITIONS POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE 
[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).value EXPRESSION 
[(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] ds=2010-08-15/hr=11 @@ -3035,14 +3035,14 @@ #### A masked pattern was here #### POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part 
PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] 1 1 2010-08-15 file, Index: ql/src/test/results/clientpositive/stats20.q.out =================================================================== --- ql/src/test/results/clientpositive/stats20.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats20.q.out (working copy) @@ -41,11 +41,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -100,11 +95,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 0 - totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/infer_bucket_sort.q.out =================================================================== --- ql/src/test/results/clientpositive/infer_bucket_sort.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/infer_bucket_sort.q.out (working copy) @@ -135,20 +135,20 @@ POSTHOOK: Output: default@test_table@part=1 POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, 
type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -199,10 +199,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -211,10 +211,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -267,10 +267,10 @@ POSTHOOK: Lineage: 
test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -281,10 +281,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -337,12 +337,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -353,12 +353,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -411,12 +411,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -429,12 +429,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] # col_name data_type comment @@ 
-489,13 +489,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -509,13 +509,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -573,13 +573,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table 
PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -595,13 +595,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -661,13 +661,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -685,13 +685,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value 
EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -751,6 +751,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -758,8 +760,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -777,6 +777,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -784,8 +786,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -845,6 +845,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -852,8 +854,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: 
Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -873,6 +873,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -880,8 +882,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -943,6 +943,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -952,8 +954,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -973,6 +973,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -982,8 +984,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1045,6 +1045,8 @@ POSTHOOK: Lineage: 
test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1056,8 +1058,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1077,6 +1077,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1088,8 +1090,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1151,6 +1151,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1164,8 +1166,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1185,6 +1185,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table 
PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1198,8 +1200,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1261,6 +1261,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table 
PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1276,8 +1278,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1297,6 +1297,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, 
type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1312,8 +1314,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1375,6 +1375,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, 
comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1392,8 +1394,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1413,6 +1413,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ 
-1430,8 +1432,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1493,6 +1493,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1512,8 +1514,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1533,6 +1533,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1552,8 +1554,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1615,6 +1615,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1636,8 +1638,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: 
Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1657,6 +1657,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1678,8 +1680,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1743,6 +1743,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1764,8 +1766,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1787,6 +1787,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1808,8 +1810,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ] @@ -1875,6 +1875,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1896,8 +1898,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1921,6 +1921,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table 
PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1942,8 +1944,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2009,6 +2009,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2032,8 +2034,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2057,6 +2057,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2080,8 +2082,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2147,6 +2147,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION 
[(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2172,8 +2174,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2197,6 +2197,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2222,8 +2224,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2291,6 +2291,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2316,8 +2318,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2343,6 +2343,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2368,8 +2370,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2439,6 +2439,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2464,8 +2466,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2493,6 +2493,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2518,8 
+2520,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/groupby_ppr.q.out (working copy) @@ -87,15 +87,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -133,15 +128,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats8.q.out =================================================================== --- ql/src/test/results/clientpositive/stats8.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats8.q.out (working copy) @@ -159,11 +159,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -752,11 +747,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 4 - numPartitions 4 - numRows 2000 - rawDataSize 21248 - totalSize 23248 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/create_like_view.q.out =================================================================== --- ql/src/test/results/clientpositive/create_like_view.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/create_like_view.q.out (working copy) @@ -136,6 +136,8 @@ Table Type: EXTERNAL_TABLE Table Parameters: EXTERNAL TRUE + numFiles 0 + totalSize 0 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/input_part7.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part7.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/input_part7.q.out (working copy) @@ -167,15 +167,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -213,15 +208,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy) @@ -263,15 +263,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -310,15 +305,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -762,7 +752,6 @@ #### A 
masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -814,15 +803,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -861,15 +845,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -902,7 +881,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -939,7 +917,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -967,7 +944,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ 
-986,7 +962,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1021,7 +996,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1049,7 +1023,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1068,7 +1041,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/pcr.q.out =================================================================== --- ql/src/test/results/clientpositive/pcr.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/pcr.q.out (working copy) @@ -140,15 +140,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -185,15 +180,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -341,15 +331,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -386,15 +371,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -431,15 +411,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -631,15 +606,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -676,15 +646,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -840,15 +805,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -885,15 +845,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1051,15 +1006,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1096,15 +1046,10 @@ columns.types int:string #### A 
masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1141,15 +1086,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1318,15 +1258,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1363,15 +1298,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1408,15 +1338,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 
480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1589,15 +1514,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1634,15 +1554,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1777,15 +1692,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1822,15 +1732,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2005,15 +1910,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2050,15 +1950,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2095,15 +1990,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2312,15 +2202,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2357,15 +2242,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2518,15 +2398,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2806,15 +2681,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2851,15 +2721,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3154,15 +3019,10 @@ columns.types int:string #### A 
masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3199,15 +3059,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3244,15 +3099,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3289,15 +3139,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3495,15 +3340,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 
640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3540,15 +3380,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3585,15 +3420,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3863,15 +3693,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -4336,7 +4161,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4377,7 +4201,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 
rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4425,15 +4248,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -4465,7 +4283,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4502,7 +4319,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4530,7 +4346,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4549,7 +4364,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4584,7 +4398,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4612,7 +4425,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4631,7 +4443,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4675,7 +4486,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 
rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4712,7 +4522,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4740,7 +4549,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4759,7 +4567,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4794,7 +4601,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4822,7 +4628,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4841,7 +4646,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4983,15 +4787,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5158,15 +4957,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5204,15 +4998,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5385,15 +5174,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5431,15 +5215,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/show_tblproperties.q.out =================================================================== --- ql/src/test/results/clientpositive/show_tblproperties.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/show_tblproperties.q.out (working copy) @@ -29,10 +29,12 @@ POSTHOOK: query: show tblproperties tmpfoo POSTHOOK: type: 
SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### tmp true #### A masked pattern was here #### bar bar value +totalSize 0 PREHOOK: query: show tblproperties tmpfoo("bar") PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: show tblproperties tmpfoo("bar") Index: ql/src/test/results/clientpositive/macro.q.out =================================================================== --- ql/src/test/results/clientpositive/macro.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/macro.q.out (working copy) @@ -108,7 +108,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -127,7 +126,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -260,7 +258,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -279,7 +276,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -433,7 +429,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -452,7 +447,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out =================================================================== --- ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/infer_bucket_sort_map_operators.q.out (working copy) @@ -579,12 +579,12 @@ POSTHOOK: 
Lineage: test_table1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value SIMPLE [(test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table_out PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table_out PARTITION (part = '1') @@ -593,12 +593,12 @@ POSTHOOK: Lineage: test_table1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: 
Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value SIMPLE [(test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] # col_name data_type comment key string None @@ -647,12 +647,12 @@ POSTHOOK: Lineage: test_table1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value SIMPLE [(test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, 
] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_TABREF (TOK_TABNAME test_table1) a) (TOK_TABREF (TOK_TABNAME test_table2) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test_table_out) (TOK_PARTSPEC (TOK_PARTVAL part '1')))) (TOK_SELECT (TOK_HINTLIST (TOK_HINT TOK_MAPJOIN (TOK_HINTARGLIST a))) (TOK_SELEXPR (. (TOK_TABLE_OR_COL b) value)) (TOK_SELEXPR (TOK_FUNCTIONSTAR count))) (TOK_GROUPBY (. (TOK_TABLE_OR_COL b) value)))) @@ -766,10 +766,10 @@ POSTHOOK: Lineage: test_table1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value SIMPLE [(test_table2)b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 
POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)a.null, (test_table2)b.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] @@ -782,10 +782,10 @@ POSTHOOK: Lineage: test_table1.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2.value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] +POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value SIMPLE [(test_table2)b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] -POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)test_table1.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).value EXPRESSION [(test_table1)a.null, (test_table2)b.null, ] POSTHOOK: Lineage: test_table_out PARTITION(part=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/stats3.q.out =================================================================== --- ql/src/test/results/clientpositive/stats3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats3.q.out 
(working copy) @@ -79,7 +79,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 11 @@ -233,11 +232,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 6 - rawDataSize 6 - totalSize 171 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -71,15 +71,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -193,7 +188,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -212,7 +206,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -235,7 +228,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -254,7 +246,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -298,15 +289,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 
partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/input_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/input_part2.q.out (working copy) @@ -162,15 +162,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -208,15 +203,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/alter_partition_coltype.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_partition_coltype.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/alter_partition_coltype.q.out (working copy) @@ -162,15 +162,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 2 - 
numPartitions 2 - numRows 50 partition_columns dt/ts - rawDataSize 382 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 432 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -362,15 +357,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -520,15 +510,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -566,15 +551,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -612,15 +592,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 
- numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -857,15 +832,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -903,15 +873,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -949,15 +914,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -1090,15 +1050,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 
partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -1136,15 +1091,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -1182,15 +1132,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype Index: ql/src/test/results/clientpositive/stats_noscan_1.q.out =================================================================== --- ql/src/test/results/clientpositive/stats_noscan_1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats_noscan_1.q.out (working copy) @@ -228,6 +228,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -271,6 +273,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ 
-314,11 +318,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 0 - rawDataSize 0 - totalSize 11624 #### A masked pattern was here #### # Storage Information @@ -613,6 +612,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -664,6 +665,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out (working copy) @@ -156,15 +156,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -308,15 +303,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -439,15 +429,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - 
numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -485,15 +470,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -584,15 +564,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -631,15 +606,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -678,15 +648,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - 
rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -768,15 +733,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -867,15 +827,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -914,15 +869,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -961,15 +911,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string 
key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1080,15 +1025,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/load_dyn_part8.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part8.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/load_dyn_part8.q.out (working copy) @@ -172,15 +172,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -218,15 +213,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -264,15 +254,10 @@ 
columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -310,15 +295,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out =================================================================== --- ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out (working copy) @@ -156,25 +156,25 @@ POSTHOOK: Output: default@test_table@ds=2008-04-08/hr=11 POSTHOOK: Output: default@test_table@ds=2008-04-08/hr=12 POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key 
SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (ds='2008-04-08', hr='11') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (ds='2008-04-08', hr='11') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: 
test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] # col_name data_type comment key string default @@ -215,13 +215,13 @@ POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (ds='2008-04-08', hr='12') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] # col_name data_type comment key string default @@ -283,33 +283,33 @@ POSTHOOK: Output: default@test_table@ds=2008-04-08/hr=11 POSTHOOK: Output: default@test_table@ds=2008-04-08/hr=12 POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table 
PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (ds='2008-04-08', hr='11') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (ds='2008-04-08', hr='11') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, 
type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] # col_name data_type comment key string default @@ -350,17 +350,17 @@ POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (ds='2008-04-08', hr='12') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, 
comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] # col_name data_type comment key string default @@ -402,34 +402,34 @@ POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@srcpart_merge_dp POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] 
-POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: CREATE TABLE srcpart_merge_dp_rc LIKE 
srcpart PREHOOK: type: CREATETABLE POSTHOOK: query: CREATE TABLE srcpart_merge_dp_rc LIKE srcpart POSTHOOK: type: CREATETABLE POSTHOOK: Output: default@srcpart_merge_dp_rc POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION 
[(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: ALTER TABLE srcpart_merge_dp_rc SET FILEFORMAT RCFILE PREHOOK: type: ALTERTABLE_FILEFORMAT PREHOOK: Input: default@srcpart_merge_dp_rc @@ -439,17 +439,17 @@ POSTHOOK: Input: default@srcpart_merge_dp_rc POSTHOOK: Output: default@srcpart_merge_dp_rc POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: 
Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11) PREHOOK: type: LOAD PREHOOK: Output: default@srcpart_merge_dp @@ -458,17 +458,17 @@ POSTHOOK: Output: default@srcpart_merge_dp POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11 POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION 
[(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket21.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11) PREHOOK: type: LOAD PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11 @@ -476,17 +476,17 @@ POSTHOOK: type: LOAD POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11 POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket22.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11) PREHOOK: type: LOAD PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11 @@ -494,17 +494,17 @@ POSTHOOK: type: LOAD POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11 POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: 
Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: LOAD DATA LOCAL INPATH 
'../data/files/srcbucket23.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=11) PREHOOK: type: LOAD PREHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11 @@ -512,17 +512,17 @@ POSTHOOK: type: LOAD POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=11 POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, 
comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: LOAD DATA LOCAL INPATH '../data/files/srcbucket20.txt' INTO TABLE srcpart_merge_dp PARTITION(ds='2008-04-08', hr=12) PREHOOK: type: LOAD PREHOOK: Output: default@srcpart_merge_dp @@ -531,17 +531,17 @@ POSTHOOK: Output: default@srcpart_merge_dp POSTHOOK: Output: default@srcpart_merge_dp@ds=2008-04-08/hr=12 POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] 
-POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: INSERT OVERWRITE TABLE srcpart_merge_dp_rc PARTITION (ds = '2008-04-08', hr) SELECT key, value, hr FROM srcpart_merge_dp WHERE ds = '2008-04-08' PREHOOK: type: QUERY @@ -562,17 +562,17 @@ POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE 
[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: -- Tests dynamic partitions where bucketing/sorting can be inferred, but some partitions are -- merged and some are moved. 
Currently neither should be bucketed or sorted, in the future, -- (ds='2008-04-08', hr='12') may be bucketed and sorted, (ds='2008-04-08', hr='11') should @@ -602,17 +602,17 @@ POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME srcpart))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTIONSTAR COUNT) value)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '2008-04-08')) (TOK_GROUPBY (TOK_TABLE_OR_COL key)))) a)) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test_table) (TOK_PARTSPEC (TOK_PARTVAL ds '2008-04-08') (TOK_PARTVAL hr)))) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value)) (TOK_SELEXPR (TOK_FUNCTION IF (== (% (TOK_TABLE_OR_COL key) 100) 0) '11' '12'))))) @@ -751,20 +751,20 @@ POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table 
PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table 
PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (ds='2008-04-08', hr='11') PREHOOK: type: DESCTABLE @@ -775,20 +775,20 @@ POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table 
PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] # col_name data_type comment @@ -834,20 +834,20 @@ POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: srcpart_merge_dp_rc PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart_merge_dp)srcpart_merge_dp.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=11).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(ds=2008-04-08,hr=12).value EXPRESSION [(srcpart)srcpart.null, ] # col_name data_type comment Index: ql/src/test/results/clientpositive/sample9.q.out =================================================================== --- ql/src/test/results/clientpositive/sample9.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample9.q.out (working copy) @@ -69,7 +69,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -89,7 +88,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/describe_table.q.out =================================================================== --- ql/src/test/results/clientpositive/describe_table.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/describe_table.q.out (working copy) @@ -91,11 +91,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 4 - numPartitions 4 - numRows 0 - rawDataSize 0 - totalSize 23248 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out =================================================================== --- 
ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy) @@ -104,15 +104,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -150,15 +145,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/groupby_sort_6.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_6.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/groupby_sort_6.q.out (working copy) @@ -259,7 +259,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -288,7 +287,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -424,15 +422,10 @@ columns.types string:string #### A masked pattern was here #### name default.t1 - numFiles 1 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct t1 { string key, string 
val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 @@ -473,7 +466,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -502,7 +494,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} Index: ql/src/test/results/clientpositive/sample4.q.out =================================================================== --- ql/src/test/results/clientpositive/sample4.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample4.q.out (working copy) @@ -88,7 +88,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -108,7 +107,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/push_or.q.out =================================================================== --- ql/src/test/results/clientpositive/push_or.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/push_or.q.out (working copy) @@ -116,15 +116,10 @@ columns.types int:string #### A masked pattern was here #### name default.push_or - numFiles 2 - numPartitions 2 - numRows 40 partition_columns ds - rawDataSize 320 serialization.ddl struct push_or { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 360 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.push_or @@ -161,15 +156,10 
@@ columns.types int:string #### A masked pattern was here #### name default.push_or - numFiles 2 - numPartitions 2 - numRows 40 partition_columns ds - rawDataSize 320 serialization.ddl struct push_or { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 360 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.push_or Index: ql/src/test/results/clientpositive/bucketcontext_7.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_7.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketcontext_7.q.out (working copy) @@ -139,15 +139,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -185,15 +180,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -292,15 +282,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -340,15 +325,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -495,15 +475,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -543,15 +518,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/groupby_sort_1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/groupby_sort_1.q.out (working copy) @@ -129,7 +129,6 @@ #### A masked pattern was here #### name 
default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -150,7 +149,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -461,7 +459,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -482,7 +479,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -682,7 +678,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -712,7 +707,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -733,7 +727,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -771,7 +764,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -808,7 +800,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -836,7 +827,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -855,7 +845,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -890,7 +879,6 @@ #### A 
masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -918,7 +906,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -937,7 +924,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1076,7 +1062,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1106,7 +1091,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1127,7 +1111,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1165,7 +1148,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1202,7 +1184,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1230,7 +1211,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1249,7 +1229,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1284,7 +1263,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 
15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1312,7 +1290,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1331,7 +1308,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1523,7 +1499,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1544,7 +1519,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1901,7 +1875,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1922,7 +1895,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2166,7 +2138,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2187,7 +2158,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2238,7 +2208,6 @@ #### A masked pattern was here #### name default.outputtbl3 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 25 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} @@ -2267,7 +2236,6 @@ #### A masked pattern was here #### name default.outputtbl3 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 25 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} @@ -2465,7 +2433,6 @@ #### A masked pattern was here #### 
name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2486,7 +2453,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2533,7 +2499,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2562,7 +2527,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2759,7 +2723,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2821,7 +2784,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2851,7 +2813,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2872,7 +2833,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2910,7 +2870,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2947,7 +2906,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2975,7 +2933,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 
key, i32 cnt} @@ -2994,7 +2951,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3029,7 +2985,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3057,7 +3012,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3076,7 +3030,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3291,7 +3244,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3312,7 +3264,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3391,7 +3342,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3453,7 +3403,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3503,7 +3452,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3524,7 +3472,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3563,7 +3510,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - 
numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3600,7 +3546,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3628,7 +3573,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3647,7 +3591,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3682,7 +3625,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3710,7 +3652,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3729,7 +3670,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3990,7 +3930,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4011,7 +3950,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4057,7 +3995,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 32 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4086,7 +4023,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 32 serialization.ddl struct outputtbl1 { i32 key, 
i32 cnt} @@ -4304,7 +4240,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4325,7 +4260,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4476,7 +4410,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4497,7 +4430,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4737,7 +4669,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -4758,7 +4689,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -4805,7 +4735,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4834,7 +4763,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -5053,7 +4981,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5083,7 +5010,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5104,7 +5030,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 
rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5142,7 +5067,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5179,7 +5103,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5207,7 +5130,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5226,7 +5148,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5261,7 +5182,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5289,7 +5209,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5308,7 +5227,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5615,7 +5533,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5636,7 +5553,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6062,7 +5978,6 @@ #### A masked pattern was here 
#### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6092,7 +6007,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6113,7 +6027,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6151,7 +6064,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6188,7 +6100,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6216,7 +6127,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6235,7 +6145,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6270,7 +6179,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6298,7 +6206,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6317,7 +6224,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { 
i32 key1, i32 key2, string key3, i32 cnt} @@ -6594,7 +6500,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6624,7 +6529,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6645,7 +6549,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6683,7 +6586,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6720,7 +6622,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6748,7 +6649,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6767,7 +6667,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6802,7 +6701,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6830,7 +6728,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6849,7 +6746,6 @@ #### A masked pattern was here #### name default.outputtbl4 
numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} Index: ql/src/test/results/clientpositive/stats13.q.out =================================================================== --- ql/src/test/results/clientpositive/stats13.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats13.q.out (working copy) @@ -77,10 +77,12 @@ columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart + numFiles 1 partition_columns ds/hr serialization.ddl struct analyze_srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -159,11 +161,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -254,6 +251,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -297,6 +296,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -340,6 +341,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/udf_reflect2.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_reflect2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/udf_reflect2.q.out (working copy) @@ -203,7 +203,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 
numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -222,7 +221,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out (working copy) @@ -128,15 +128,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -226,15 +221,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -273,15 +263,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -320,15 +305,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -453,15 +433,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -551,15 +526,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -598,15 +568,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.bucket_big @@ -645,15 +610,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -771,15 +731,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -877,15 +832,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -924,15 +874,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: 
ql/src/test/results/clientpositive/rand_partitionpruner1.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (working copy) @@ -66,7 +66,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -85,7 +84,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/combine2_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/combine2_hadoop20.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/combine2_hadoop20.q.out (working copy) @@ -268,15 +268,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -313,15 +308,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -358,15 +348,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - 
numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -403,15 +388,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -448,15 +428,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -493,15 +468,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -538,15 +508,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -583,15 +548,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 Index: ql/src/test/results/clientpositive/show_create_table_alter.q.out =================================================================== --- ql/src/test/results/clientpositive/show_create_table_alter.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/show_create_table_alter.q.out (working copy) @@ -68,7 +68,9 @@ #### A masked pattern was here #### TBLPROPERTIES ( 'EXTERNAL'='FALSE', + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: -- Alter the table comment, change the EXTERNAL property back and test SHOW CREATE TABLE on the change. ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -103,7 +105,9 @@ LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: -- Change the 'SORTBUCKETCOLSPREFIX' property and test SHOW CREATE TABLE. The output should not change. ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -138,7 +142,9 @@ LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: -- Alter the storage handler of the table, and test SHOW CREATE TABLE. 
ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -173,7 +179,9 @@ LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@tmp_showcrt1 Index: ql/src/test/results/clientpositive/bucketcontext_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketcontext_2.q.out (working copy) @@ -114,15 +114,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -221,15 +216,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -269,15 +259,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -422,15 +407,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -470,15 +450,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/auto_join_reordering_values.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_join_reordering_values.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_join_reordering_values.q.out (working copy) @@ -159,7 +159,6 @@ #### A masked pattern was here #### name default.orderpayment_small numFiles 1 - numPartitions 0 numRows 1 rawDataSize 36 serialization.ddl struct orderpayment_small { i32 dealid, string date, string time, i32 cityid, i32 userid} @@ -178,7 +177,6 @@ #### A masked pattern was here #### name default.orderpayment_small numFiles 1 - numPartitions 0 numRows 1 rawDataSize 36 serialization.ddl struct orderpayment_small { i32 dealid, string date, string time, i32 cityid, i32 userid} @@ -292,7 +290,6 @@ #### 
A masked pattern was here #### name default.orderpayment_small numFiles 1 - numPartitions 0 numRows 1 rawDataSize 36 serialization.ddl struct orderpayment_small { i32 dealid, string date, string time, i32 cityid, i32 userid} @@ -311,7 +308,6 @@ #### A masked pattern was here #### name default.orderpayment_small numFiles 1 - numPartitions 0 numRows 1 rawDataSize 36 serialization.ddl struct orderpayment_small { i32 dealid, string date, string time, i32 cityid, i32 userid} @@ -423,7 +419,6 @@ #### A masked pattern was here #### name default.orderpayment_small numFiles 1 - numPartitions 0 numRows 1 rawDataSize 36 serialization.ddl struct orderpayment_small { i32 dealid, string date, string time, i32 cityid, i32 userid} @@ -442,7 +437,6 @@ #### A masked pattern was here #### name default.orderpayment_small numFiles 1 - numPartitions 0 numRows 1 rawDataSize 36 serialization.ddl struct orderpayment_small { i32 dealid, string date, string time, i32 cityid, i32 userid} @@ -552,7 +546,6 @@ #### A masked pattern was here #### name default.user_small numFiles 1 - numPartitions 0 numRows 100 rawDataSize 288 serialization.ddl struct user_small { i32 userid} @@ -571,7 +564,6 @@ #### A masked pattern was here #### name default.user_small numFiles 1 - numPartitions 0 numRows 100 rawDataSize 288 serialization.ddl struct user_small { i32 userid} Index: ql/src/test/results/clientpositive/bucket2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucket2.q.out (working copy) @@ -60,7 +60,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -79,7 +78,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: 
ql/src/test/results/clientpositive/infer_bucket_sort_multi_insert.q.out =================================================================== --- ql/src/test/results/clientpositive/infer_bucket_sort_multi_insert.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/infer_bucket_sort_multi_insert.q.out (working copy) @@ -259,10 +259,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -275,10 +275,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: 
Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] # col_name data_type comment @@ -325,10 +325,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] # col_name data_type comment @@ -391,11 +391,11 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -411,11 +411,11 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key 
SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -465,11 +465,11 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key 
SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (working copy) @@ -114,15 +114,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -160,15 +155,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/parallel_orderby.q.out =================================================================== --- ql/src/test/results/clientpositive/parallel_orderby.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/parallel_orderby.q.out (working copy) @@ -115,7 +115,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 560 @@ -224,7 +223,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 560 Index: ql/src/test/results/clientpositive/filter_join_breaktask.q.out =================================================================== --- ql/src/test/results/clientpositive/filter_join_breaktask.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/filter_join_breaktask.q.out (working copy) @@ -116,15 +116,10 @@ columns.types int:string #### A masked pattern was here #### name default.filter_join_breaktask - numFiles 1 - numPartitions 1 - numRows 25 partition_columns ds - rawDataSize 211 serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 236 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.filter_join_breaktask @@ -253,15 +248,10 @@ columns.types int:string #### A masked pattern was here #### name default.filter_join_breaktask - numFiles 1 - numPartitions 1 - numRows 25 partition_columns ds - rawDataSize 211 serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 236 #### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.filter_join_breaktask Index: ql/src/test/results/clientpositive/join17.q.out =================================================================== --- ql/src/test/results/clientpositive/join17.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join17.q.out (working copy) @@ -74,7 +74,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -93,7 +92,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out =================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out (working copy) @@ -143,15 +143,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/input_part9.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part9.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/input_part9.q.out (working copy) @@ -88,15 +88,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 
partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -134,15 +129,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketmapjoin7.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin7.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin7.q.out (working copy) @@ -101,15 +101,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -224,15 +219,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 
#### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/alter_table_serde2.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_table_serde2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/alter_table_serde2.q.out (working copy) @@ -132,12 +132,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/bucketmapjoin11.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin11.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin11.q.out (working copy) @@ -168,15 +168,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -213,15 +208,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -319,15 +309,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -366,15 +351,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -502,15 +482,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -547,15 +522,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -653,15 +623,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -700,15 +665,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- ql/src/test/results/clientpositive/join26.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -164,15 +164,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: 
ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy) @@ -110,15 +110,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 3 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -208,7 +203,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -228,7 +222,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} Index: ql/src/test/results/clientpositive/rcfile_default_format.q.out =================================================================== --- ql/src/test/results/clientpositive/rcfile_default_format.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/rcfile_default_format.q.out (working copy) @@ -56,7 +56,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 4812 totalSize 5293 @@ -104,7 +103,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 1406 totalSize 1906 @@ -147,7 +145,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 @@ -190,7 +187,6 @@ Table Type: MANAGED_TABLE Table 
Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 4812 totalSize 5293 @@ -268,7 +264,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 4812 totalSize 5293 Index: ql/src/test/results/clientpositive/stats5.q.out =================================================================== --- ql/src/test/results/clientpositive/stats5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats5.q.out (working copy) @@ -54,7 +54,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 Index: ql/src/test/results/clientpositive/ppd_join_filter.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_join_filter.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/ppd_join_filter.q.out (working copy) @@ -81,7 +81,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -100,7 +99,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -229,7 +227,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -248,7 +245,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -424,7 +420,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -443,7 +438,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string 
value} @@ -572,7 +566,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -591,7 +584,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -767,7 +759,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -786,7 +777,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -915,7 +905,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -934,7 +923,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1110,7 +1098,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1129,7 +1116,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1258,7 +1244,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1277,7 +1262,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- 
ql/src/test/results/clientpositive/join35.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -96,7 +96,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -115,7 +114,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -346,7 +344,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -365,7 +362,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -551,7 +547,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -570,7 +565,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -692,7 +686,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -711,7 +704,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -822,7 +814,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -841,7 +832,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: 
ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ -127,15 +127,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -254,15 +249,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -671,15 +661,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -751,7 +736,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct 
bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -803,15 +787,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -843,7 +822,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -880,7 +858,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -908,7 +885,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -927,7 +903,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -962,7 +937,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -990,7 +964,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct 
bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1009,7 +982,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1414,15 +1386,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -1459,15 +1426,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -1539,7 +1501,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1591,15 +1552,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A 
masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -1631,7 +1587,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1668,7 +1623,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1696,7 +1650,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1715,7 +1668,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1750,7 +1702,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1778,7 +1729,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1797,7 +1747,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out 
=================================================================== --- ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out (working copy) @@ -46,6 +46,9 @@ #### A masked pattern was here #### Partition Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -93,6 +96,9 @@ #### A masked pattern was here #### Partition Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -140,6 +146,9 @@ #### A masked pattern was here #### Partition Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy) @@ -166,15 +166,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -669,7 +664,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string 
value, string val2} @@ -720,15 +714,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -760,7 +749,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -797,7 +785,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -825,7 +812,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -844,7 +830,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -879,7 +864,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -907,7 +891,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -926,7 +909,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} Index: 
ql/src/test/results/clientpositive/stats0.q.out =================================================================== --- ql/src/test/results/clientpositive/stats0.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats0.q.out (working copy) @@ -73,7 +73,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -92,7 +91,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1414,7 +1412,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1433,7 +1430,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/join9.q.out =================================================================== --- ql/src/test/results/clientpositive/join9.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join9.q.out (working copy) @@ -70,7 +70,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -89,7 +88,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -133,15 +131,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/smb_mapjoin_11.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_11.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/smb_mapjoin_11.q.out (working copy) @@ -155,15 +155,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table1 - numFiles 16 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 Index: ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out =================================================================== --- ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (working copy) @@ -99,15 +99,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -145,15 +140,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### 
A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -292,15 +282,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -338,15 +323,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -384,15 +364,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -430,15 +405,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe 
name: default.srcpart Index: ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (working copy) @@ -160,15 +160,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -208,15 +203,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -361,15 +351,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -409,15 +394,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct 
bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -541,15 +521,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -640,15 +615,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -688,15 +658,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -735,15 +700,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 
1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -826,15 +786,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -873,15 +828,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -972,15 +922,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1020,15 +965,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1067,15 +1007,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1185,15 +1120,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1233,15 +1163,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- ql/src/test/results/clientpositive/sample6.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -86,7 +86,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 
- numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -106,7 +105,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -631,7 +629,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -651,7 +648,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -1008,7 +1004,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -1028,7 +1023,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -1639,7 +1633,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -1659,7 +1652,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -2113,7 +2105,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -2133,7 +2124,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -2573,7 +2563,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl 
struct srcbucket2 { i32 key, string value} @@ -2593,7 +2582,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2617,7 +2605,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2637,7 +2624,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2878,7 +2864,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2898,7 +2883,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} Index: ql/src/test/results/clientpositive/join_filters_overlap.q.out =================================================================== --- ql/src/test/results/clientpositive/join_filters_overlap.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join_filters_overlap.q.out (working copy) @@ -105,7 +105,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -124,7 +123,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -314,7 +312,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -333,7 +330,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a 
{ i32 key, i32 value} @@ -523,7 +519,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -542,7 +537,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -750,7 +744,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -769,7 +762,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -981,7 +973,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -1000,7 +991,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} Index: ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out =================================================================== --- ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out (working copy) @@ -75,6 +75,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -117,6 +120,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -163,6 +169,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern 
was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -205,6 +214,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -246,6 +258,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/bucket_map_join_1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_map_join_1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucket_map_join_1.q.out (working copy) @@ -126,7 +126,6 @@ #### A masked pattern was here #### name default.table1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct table1 { string key, string value} @@ -147,7 +146,6 @@ #### A masked pattern was here #### name default.table1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct table1 { string key, string value} Index: ql/src/test/results/clientpositive/sample1.q.out =================================================================== --- ql/src/test/results/clientpositive/sample1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample1.q.out (working copy) @@ -112,15 +112,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats15.q.out =================================================================== --- ql/src/test/results/clientpositive/stats15.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats15.q.out (working copy) @@ -43,7 +43,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 @@ -175,11 +174,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information @@ -345,11 +339,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/stats_partscan_1.q.out =================================================================== --- ql/src/test/results/clientpositive/stats_partscan_1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats_partscan_1.q.out (working copy) @@ -81,6 +81,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5293 #### A masked pattern was here #### # Storage Information @@ -224,6 +226,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5293 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/reduce_deduplicate.q.out =================================================================== --- ql/src/test/results/clientpositive/reduce_deduplicate.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/reduce_deduplicate.q.out (working copy) @@ -63,7 +63,6 @@ #### A masked pattern was here #### name default.src 
numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -82,7 +81,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy) @@ -93,15 +93,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -222,15 +217,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_4.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketcontext_4.q.out (working copy) @@ -126,15 +126,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 
partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -172,15 +167,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -279,15 +269,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -431,15 +416,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/stats10.q.out =================================================================== --- ql/src/test/results/clientpositive/stats10.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/stats10.q.out (working copy) @@ -550,11 +550,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 4 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/bucket4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket4.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucket4.q.out (working copy) @@ -63,7 +63,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -82,7 +81,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/merge3.q.out =================================================================== --- ql/src/test/results/clientpositive/merge3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/merge3.q.out (working copy) @@ -117,7 +117,6 @@ #### A masked pattern was here #### name default.merge_src numFiles 4 - numPartitions 0 numRows 2000 rawDataSize 21248 serialization.ddl struct merge_src { string key, string value} @@ -136,7 +135,6 @@ #### A masked pattern was here #### name default.merge_src numFiles 4 - numPartitions 0 numRows 2000 rawDataSize 21248 serialization.ddl struct merge_src { string key, string value} @@ -2344,7 +2342,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 2000 rawDataSize 21248 totalSize 23248 @@ -2473,15 +2470,10 @@ columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 4 - numPartitions 2 - numRows 2000 partition_columns ds - rawDataSize 21248 serialization.ddl struct merge_src_part { string key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part @@ -2518,15 +2510,10 @@ columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 4 - numPartitions 2 - numRows 2000 partition_columns ds - rawDataSize 21248 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part @@ -4904,15 +4891,10 @@ columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 4 - numPartitions 2 - numRows 2000 partition_columns ds - rawDataSize 21248 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part @@ -4949,15 +4931,10 @@ columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 4 - numPartitions 2 - numRows 2000 partition_columns ds - rawDataSize 21248 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part Index: ql/src/test/results/clientpositive/udtf_explode.q.out =================================================================== --- ql/src/test/results/clientpositive/udtf_explode.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/udtf_explode.q.out (working copy) @@ -70,7 +70,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -89,7 +88,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -157,7 +155,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -176,7 +173,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -435,7 +431,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -454,7 +449,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out =================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out (working copy) @@ -178,15 +178,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 
@@ -223,15 +218,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -321,15 +311,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -368,15 +353,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/binary_output_format.q.out =================================================================== --- ql/src/test/results/clientpositive/binary_output_format.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/binary_output_format.q.out (working copy) @@ -130,7 +130,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 
rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -149,7 +148,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucketmapjoin9.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin9.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin9.q.out (working copy) @@ -114,15 +114,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 3 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 4200 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -212,15 +207,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -384,15 +374,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 5 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6950 #### A masked pattern was here 
#### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -482,15 +467,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/bucketmapjoin13.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin13.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin13.q.out (working copy) @@ -142,15 +142,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -240,15 +235,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -287,15 +277,10 @@ columns.types 
int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -437,15 +422,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -543,15 +523,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -704,15 +679,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -810,15 +780,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -973,15 +938,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -1079,15 +1039,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/stats7.q.out =================================================================== --- ql/src/test/results/clientpositive/stats7.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats7.q.out (working copy) @@ -210,11 +210,6 @@ #### A masked pattern was here #### Table 
Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/show_create_table_serde.q.out =================================================================== --- ql/src/test/results/clientpositive/show_create_table_serde.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/show_create_table_serde.q.out (working copy) @@ -37,7 +37,9 @@ LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@tmp_showcrt1 Index: ql/src/test/results/clientpositive/smb_mapjoin_18.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_18.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/smb_mapjoin_18.q.out (working copy) @@ -500,10 +500,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE 
[(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select count(*) from test_table2 where ds = '3' PREHOOK: type: QUERY PREHOOK: Input: default@test_table2 @@ -516,10 +516,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 PREHOOK: query: select count(*) from test_table2 where ds = '3' and hash(key) % 2 = 0 PREHOOK: type: QUERY @@ -533,10 +533,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 
+POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 PREHOOK: query: select count(*) from test_table2 where ds = '3' and hash(key) % 2 = 1 PREHOOK: type: QUERY @@ -550,10 +550,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE 
[(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 PREHOOK: query: select count(*) from test_table2 tablesample (bucket 1 out of 2) s where ds = '3' PREHOOK: type: QUERY @@ -567,10 +567,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 PREHOOK: query: select count(*) from test_table2 tablesample (bucket 2 out of 2) s where ds = '3' PREHOOK: type: QUERY @@ -584,8 +584,8 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), 
] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out (working copy) @@ -204,7 +204,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -224,7 +223,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -661,7 +659,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -692,7 +689,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -712,7 +708,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string 
value} @@ -750,7 +745,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -787,7 +781,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -815,7 +808,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -834,7 +826,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -869,7 +860,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -897,7 +887,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -916,7 +905,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/union22.q.out =================================================================== --- ql/src/test/results/clientpositive/union22.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/union22.q.out (working copy) @@ 
-136,15 +136,10 @@ columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 16936 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 17436 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -277,15 +272,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -322,15 +312,10 @@ columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 16936 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 17436 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -372,15 +357,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -435,15 +415,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -505,15 +480,10 @@ columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 16936 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 17436 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -538,15 +508,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -651,15 +616,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 
500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -696,15 +656,10 @@ columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 16936 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 17436 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta Index: ql/src/test/results/clientpositive/stats2.q.out =================================================================== --- ql/src/test/results/clientpositive/stats2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats2.q.out (working copy) @@ -221,11 +221,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 4 - numPartitions 4 - numRows 2000 - rawDataSize 21248 - totalSize 23248 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/smb_mapjoin_13.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_13.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/smb_mapjoin_13.q.out (working copy) @@ -142,7 +142,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} @@ -163,7 +162,6 @@ #### A masked pattern was here #### name 
default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} @@ -346,7 +344,6 @@ #### A masked pattern was here #### name default.test_table3 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table3 { i32 key, string value} @@ -367,7 +364,6 @@ #### A masked pattern was here #### name default.test_table3 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table3 { i32 key, string value} Index: ql/src/test/results/clientpositive/unset_table_view_property.q.out =================================================================== --- ql/src/test/results/clientpositive/unset_table_view_property.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/unset_table_view_property.q.out (working copy) @@ -24,11 +24,13 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET all the properties ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'c') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -44,7 +46,9 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@testtable @@ -59,11 +63,13 @@ POSTHOOK: type: SHOW_TBLPROPERTIES d 4 +numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET a subset of the properties ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'd') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -79,9 +85,11 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES 
+numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- the same property being UNSET multiple times ALTER TABLE testTable UNSET TBLPROPERTIES ('c', 'c', 'c') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -97,7 +105,9 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@testtable @@ -112,12 +122,14 @@ POSTHOOK: type: SHOW_TBLPROPERTIES d 4 +numFiles 0 #### A masked pattern was here #### b 2 c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -133,11 +145,13 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -153,9 +167,11 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET VIEW PROPERTIES CREATE VIEW testView AS SELECT value FROM src WHERE key=86 PREHOOK: type: CREATEVIEW Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -71,15 +71,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -193,7 +188,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -212,7 +206,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -235,7 +228,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -254,7 +246,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -298,15 +289,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/ctas_uses_database_location.q.out =================================================================== --- ql/src/test/results/clientpositive/ctas_uses_database_location.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/ctas_uses_database_location.q.out (working copy) @@ -145,7 +145,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -108,15 +108,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (working copy) @@ -129,7 +129,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -150,7 +149,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -460,7 +458,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -481,7 +478,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ 
-759,7 +755,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -789,7 +784,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -810,7 +804,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -848,7 +841,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -885,7 +877,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -913,7 +904,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -932,7 +922,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -967,7 +956,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -995,7 +983,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1014,7 +1001,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1153,7 +1139,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 
rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1183,7 +1168,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1204,7 +1188,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1242,7 +1225,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1279,7 +1261,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1307,7 +1288,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1326,7 +1306,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1361,7 +1340,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1389,7 +1367,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1408,7 +1385,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1600,7 +1576,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1621,7 +1596,6 @@ #### A masked 
pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1975,7 +1949,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1996,7 +1969,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2323,7 +2295,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2344,7 +2315,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2473,7 +2443,6 @@ #### A masked pattern was here #### name default.outputtbl3 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 25 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} @@ -2502,7 +2471,6 @@ #### A masked pattern was here #### name default.outputtbl3 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 25 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} @@ -2701,7 +2669,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2722,7 +2689,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2841,7 +2807,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2870,7 +2835,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct 
outputtbl1 { i32 key, i32 cnt} @@ -3067,7 +3031,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3129,7 +3092,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3159,7 +3121,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3180,7 +3141,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3218,7 +3178,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3255,7 +3214,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3283,7 +3241,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3302,7 +3259,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3337,7 +3293,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3365,7 +3320,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3384,7 +3338,6 @@ #### A masked pattern was here #### name 
default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3600,7 +3553,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3621,7 +3573,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3772,7 +3723,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3834,7 +3784,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3884,7 +3833,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3905,7 +3853,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3944,7 +3891,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3981,7 +3927,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4009,7 +3954,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4028,7 +3972,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { 
i32 key, i32 cnt} @@ -4063,7 +4006,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4091,7 +4033,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4110,7 +4051,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4371,7 +4311,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4392,7 +4331,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4438,7 +4376,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 32 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4467,7 +4404,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 32 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4684,7 +4620,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4705,7 +4640,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4934,7 +4868,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4955,7 +4888,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 
numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -5196,7 +5128,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5217,7 +5148,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5336,7 +5266,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -5365,7 +5294,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -5584,7 +5512,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5614,7 +5541,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5635,7 +5561,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5673,7 +5598,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5710,7 +5634,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5738,7 +5661,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct 
outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5757,7 +5679,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5792,7 +5713,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5820,7 +5740,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5839,7 +5758,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6146,7 +6064,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6167,7 +6084,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6593,7 +6509,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6623,7 +6538,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6644,7 +6558,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6682,7 +6595,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 
serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6719,7 +6631,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6747,7 +6658,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6766,7 +6676,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6801,7 +6710,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6829,7 +6737,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6848,7 +6755,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7125,7 +7031,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7155,7 +7060,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -7176,7 +7080,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -7214,7 +7117,6 @@ #### A masked pattern 
was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7251,7 +7153,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7279,7 +7180,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7298,7 +7198,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7333,7 +7232,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7361,7 +7259,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7380,7 +7277,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_8.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_8.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_8.q.out (working copy) @@ -319,12 +319,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key 
EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -339,12 +339,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: 
test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 0 val_0val_0 1 0 0 val_0val_0 1 0 0 val_0val_0 1 @@ -371,12 +371,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE 
[(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 5 val_5val_5 1 5 5 val_5val_5 1 5 5 val_5val_5 1 Index: ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out (working copy) @@ -169,15 +169,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -217,15 +212,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -372,15 +362,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -420,15 +405,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -556,15 +536,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -602,15 +577,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -701,15 +671,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -749,15 +714,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -796,15 +756,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.bucket_small @@ -843,15 +798,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -934,15 +884,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -981,15 +926,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1080,15 +1020,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1128,15 +1063,10 @@ columns.types 
string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1175,15 +1105,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1222,15 +1147,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1341,15 +1261,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1389,15 +1304,10 @@ columns.types string:string #### A masked pattern was here #### 
name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out (working copy) @@ -269,10 +269,10 @@ POSTHOOK: Output: default@test_table2@ds=1 POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select count(*) from test_table2 where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table2 @@ -285,10 +285,10 @@ #### A masked pattern was here #### POSTHOOK: 
Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 500 PREHOOK: query: select count(*) from test_table2 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY @@ -302,10 +302,10 @@ #### A masked pattern was here #### POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 
PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 500 PREHOOK: query: select count(*) from test_table2 tablesample (bucket 2 out of 2) s where ds = '1' PREHOOK: type: QUERY @@ -319,8 +319,8 @@ #### A masked pattern was here #### POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 Index: ql/src/test/results/clientpositive/columnstats_partlvl.q.out =================================================================== --- ql/src/test/results/clientpositive/columnstats_partlvl.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/columnstats_partlvl.q.out (working copy) @@ -164,15 +164,10 @@ field.delim | #### A masked pattern was here #### name default.employee_part - numFiles 2 - numPartitions 2 - numRows 0 partition_columns employeesalary - rawDataSize 0 serialization.ddl struct employee_part { i32 employeeid, string employeename} 
serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 210 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.employee_part @@ -374,15 +369,10 @@ field.delim | #### A masked pattern was here #### name default.employee_part - numFiles 2 - numPartitions 2 - numRows 0 partition_columns employeesalary - rawDataSize 0 serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 210 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.employee_part Index: ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out (working copy) @@ -219,7 +219,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -240,7 +239,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -370,7 +368,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -391,7 +388,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -515,7 +511,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -536,7 +531,6 @@ 
#### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -658,7 +652,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -679,7 +672,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -801,7 +793,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -822,7 +813,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -944,7 +934,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -965,7 +954,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -1087,7 +1075,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -1108,7 +1095,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -1230,7 +1216,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -1251,7 +1236,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 
serialization.ddl struct test2 { string key, string value} @@ -1373,7 +1357,6 @@ #### A masked pattern was here #### name default.test3 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test3 { string key, string value} @@ -1394,7 +1377,6 @@ #### A masked pattern was here #### name default.test3 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test3 { string key, string value} Index: ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out (working copy) @@ -144,15 +144,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -296,15 +291,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -427,15 +417,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -473,15 +458,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -572,15 +552,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -619,15 +594,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -666,15 +636,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -756,15 +721,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -855,15 +815,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -902,15 +857,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -949,15 +899,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1068,15 +1013,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/sample8.q.out =================================================================== --- ql/src/test/results/clientpositive/sample8.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample8.q.out (working copy) @@ -102,15 +102,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -148,15 +143,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -194,15 +184,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 
partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -240,15 +225,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/transform_ppr2.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/transform_ppr2.q.out (working copy) @@ -111,15 +111,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -157,15 +152,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/union_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/union_ppr.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/union_ppr.q.out (working copy) @@ -169,15 +169,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -215,15 +210,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/serde_user_properties.q.out =================================================================== --- ql/src/test/results/clientpositive/serde_user_properties.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/serde_user_properties.q.out (working copy) @@ -61,7 +61,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -80,7 +79,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -158,7 +156,6 @@ #### A masked 
pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -177,7 +174,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -259,7 +255,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -278,7 +273,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -296,6 +290,7 @@ Fetch Operator limit: -1 + PREHOOK: query: explain extended select key from src ('user.defined.key'='some.value') PREHOOK: type: QUERY POSTHOOK: query: explain extended select key from src ('user.defined.key'='some.value') @@ -355,7 +350,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -375,7 +369,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -458,7 +451,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -478,7 +470,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -497,7 +488,6 @@ Fetch Operator limit: -1 - PREHOOK: query: explain extended select a.key from src ('user.defined.key'='some.value') a PREHOOK: type: QUERY POSTHOOK: query: explain extended select a.key from src ('user.defined.key'='some.value') a @@ -557,7 +547,6 @@ #### A masked pattern was here #### name default.src 
numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -577,7 +566,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -660,7 +648,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -680,7 +667,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -699,4 +685,3 @@ Fetch Operator limit: -1 - Index: ql/src/test/results/clientpositive/alter_table_not_sorted.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_table_not_sorted.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/alter_table_not_sorted.q.out (working copy) @@ -60,6 +60,9 @@ Table Parameters: SORTBUCKETCOLSPREFIX TRUE #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table.q.out (working copy) @@ -190,12 +190,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -249,12 +243,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked 
pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -347,12 +335,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -450,12 +432,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -510,12 +486,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -570,12 +540,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -630,12 +594,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/ctas_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/ctas_hadoop20.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/ctas_hadoop20.q.out (working copy) @@ -161,7 +161,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 96 totalSize 106 @@ -323,7 +322,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 96 totalSize 106 @@ -486,7 +484,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 120 totalSize 199 @@ -553,7 +550,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 120 totalSize 199 @@ -716,7 +712,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 96 totalSize 106 @@ -790,7 +785,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -809,7 +803,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/ppd_vc.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_vc.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/ppd_vc.q.out (working copy) @@ -95,15 +95,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -141,15 +136,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 
partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -187,15 +177,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -233,15 +218,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -401,7 +381,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -420,7 +399,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -464,15 +442,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -510,15 +483,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -556,15 +524,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -602,15 +565,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_6.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_6.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketcontext_6.q.out (working copy) @@ -173,15 +173,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - 
numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -221,15 +216,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -372,15 +362,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -420,15 +405,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out =================================================================== --- 
ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/dynamic_partition_skip_default.q.out (working copy) @@ -129,15 +129,10 @@ columns.types int #### A masked pattern was here #### name default.dynamic_part_table - numFiles 4 - numPartitions 4 - numRows 4 partition_columns partcol1/partcol2 - rawDataSize 4 serialization.ddl struct dynamic_part_table { i32 intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dynamic_part_table @@ -149,6 +144,7 @@ Fetch Operator limit: -1 + PREHOOK: query: explain extended select intcol from dynamic_part_table where partcol1=1 and partcol2=1 PREHOOK: type: QUERY POSTHOOK: query: explain extended select intcol from dynamic_part_table where partcol1=1 and partcol2=1 @@ -233,15 +229,10 @@ columns.types int #### A masked pattern was here #### name default.dynamic_part_table - numFiles 4 - numPartitions 4 - numRows 4 partition_columns partcol1/partcol2 - rawDataSize 4 serialization.ddl struct dynamic_part_table { i32 intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dynamic_part_table @@ -253,6 +244,7 @@ Fetch Operator limit: -1 + PREHOOK: query: explain extended select intcol from dynamic_part_table where (partcol1=1 and partcol2=1)or (partcol1=1 and partcol2='__HIVE_DEFAULT_PARTITION__') PREHOOK: type: QUERY POSTHOOK: query: explain extended select intcol from dynamic_part_table where (partcol1=1 and partcol2=1)or (partcol1=1 and partcol2='__HIVE_DEFAULT_PARTITION__') @@ -337,15 +329,10 @@ columns.types int #### A masked pattern was here #### name default.dynamic_part_table - numFiles 4 - numPartitions 4 - numRows 4 
partition_columns partcol1/partcol2 - rawDataSize 4 serialization.ddl struct dynamic_part_table { i32 intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dynamic_part_table @@ -383,15 +370,10 @@ columns.types int #### A masked pattern was here #### name default.dynamic_part_table - numFiles 4 - numPartitions 4 - numRows 4 partition_columns partcol1/partcol2 - rawDataSize 4 serialization.ddl struct dynamic_part_table { i32 intcol} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dynamic_part_table Index: ql/src/test/results/clientpositive/stats12.q.out =================================================================== --- ql/src/test/results/clientpositive/stats12.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats12.q.out (working copy) @@ -77,10 +77,12 @@ columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart + numFiles 1 partition_columns ds/hr serialization.ddl struct analyze_srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -114,10 +116,12 @@ columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart + numFiles 1 partition_columns ds/hr serialization.ddl struct analyze_srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -201,11 +205,6 @@ #### A masked pattern was here 
#### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information @@ -343,6 +342,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -386,6 +387,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/router_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy) @@ -90,7 +90,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -109,7 +108,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -153,15 +151,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -199,15 +192,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -245,15 +233,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -291,15 +274,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -499,7 +477,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -518,7 +495,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -562,15 +538,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -608,15 
+579,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -805,7 +771,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -824,7 +789,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -868,15 +832,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -914,15 +873,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1113,7 +1067,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1132,7 +1085,6 @@ #### A 
masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1176,15 +1128,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1222,15 +1169,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1268,15 +1210,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1314,15 +1251,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketcontext_1.q.out (working copy) @@ -126,15 +126,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -233,15 +228,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -281,15 +271,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -434,15 +419,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - 
rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -482,15 +462,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucket1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucket1.q.out (working copy) @@ -60,7 +60,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -79,7 +78,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/input42.q.out =================================================================== --- ql/src/test/results/clientpositive/input42.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/input42.q.out (working copy) @@ -83,15 +83,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -129,15 +124,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1276,15 +1266,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1322,15 +1307,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1847,15 +1827,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked 
pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1893,15 +1868,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats9.q.out =================================================================== --- ql/src/test/results/clientpositive/stats9.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats9.q.out (working copy) @@ -68,7 +68,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 1000 rawDataSize 10603 totalSize 11603 Index: ql/src/test/results/clientpositive/insert_into5.q.out =================================================================== --- ql/src/test/results/clientpositive/insert_into5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/insert_into5.q.out (working copy) @@ -493,10 +493,10 @@ POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5a.value SIMPLE [] POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, 
comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b ) t @@ -515,10 +515,10 @@ POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5a.value SIMPLE [] POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] -37252105840 PREHOOK: query: DROP TABLE insert_into5a PREHOOK: type: DROPTABLE @@ -532,7 +532,7 @@ POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: 
insert_into5a.value SIMPLE [] POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin10.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin10.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin10.q.out (working copy) @@ -162,15 +162,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 5 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6950 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -207,15 +202,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 5 - numPartitions 2 - numRows 0 
partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6950 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -305,15 +295,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 5 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6950 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -352,15 +337,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 5 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6950 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/union24.q.out =================================================================== --- ql/src/test/results/clientpositive/union24.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/union24.q.out (working copy) @@ -112,7 +112,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -131,7 +130,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl 
struct src5 { string key, i64 count} @@ -353,7 +351,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -372,7 +369,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -395,7 +391,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -414,7 +409,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -437,7 +431,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -456,7 +449,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -646,7 +638,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -665,7 +656,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -688,7 +678,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -707,7 +696,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -892,7 +880,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - 
numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -911,7 +898,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -934,7 +920,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -953,7 +938,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -1130,7 +1114,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -1149,7 +1132,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -1172,7 +1154,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -1191,7 +1172,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -1462,7 +1442,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -1481,7 +1460,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -1504,7 +1482,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -1523,7 
+1500,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} Index: ql/src/test/results/clientpositive/stats4.q.out =================================================================== --- ql/src/test/results/clientpositive/stats4.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats4.q.out (working copy) @@ -2544,11 +2544,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information @@ -2592,11 +2587,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/columnstats_tbllvl.q.out =================================================================== --- ql/src/test/results/clientpositive/columnstats_tbllvl.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/columnstats_tbllvl.q.out (working copy) @@ -178,7 +178,6 @@ #### A masked pattern was here #### name default.uservisits_web_text_none numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} @@ -198,7 +197,6 @@ #### A masked pattern was here #### name default.uservisits_web_text_none numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} Index: ql/src/test/results/clientpositive/smb_mapjoin_15.q.out 
=================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_15.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/smb_mapjoin_15.q.out (working copy) @@ -116,7 +116,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} @@ -137,7 +136,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} @@ -368,7 +366,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -389,7 +386,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -560,7 +556,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -581,7 +576,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -784,7 +778,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -805,7 +798,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} Index: ql/src/test/results/clientpositive/join34.q.out 
=================================================================== --- ql/src/test/results/clientpositive/join34.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -202,7 +202,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -221,7 +220,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -244,7 +242,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -263,7 +260,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy) @@ -388,15 +388,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -494,7 +489,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -514,7 +508,6 @@ #### A masked pattern was here 
#### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -959,7 +952,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1011,15 +1003,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -1051,7 +1038,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1088,7 +1074,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1116,7 +1101,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1135,7 +1119,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1170,7 +1153,6 @@ #### A masked pattern was here #### name 
default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1198,7 +1180,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1217,7 +1198,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/sample10.q.out =================================================================== --- ql/src/test/results/clientpositive/sample10.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample10.q.out (working copy) @@ -133,15 +133,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 16 - numPartitions 4 - numRows 40 partition_columns ds/hr - rawDataSize 240 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 1228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket @@ -181,15 +176,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 16 - numPartitions 4 - numRows 40 partition_columns ds/hr - rawDataSize 240 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 1228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket @@ -229,15 +219,10 @@ columns.types 
string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 16 - numPartitions 4 - numRows 40 partition_columns ds/hr - rawDataSize 240 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 1228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket @@ -277,15 +262,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 16 - numPartitions 4 - numRows 40 partition_columns ds/hr - rawDataSize 240 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 1228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_5.q.out (working copy) @@ -692,10 +692,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, 
type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -710,10 +710,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE 
[(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 8 val_8val_8 1 4 val_4val_4 1 2 val_2val_2 1 @@ -740,10 +740,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 9 val_9val_9 1 5 val_5val_5 1 5 val_5val_5 1 Index: ql/src/test/results/clientpositive/stats_noscan_2.q.out =================================================================== --- ql/src/test/results/clientpositive/stats_noscan_2.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/stats_noscan_2.q.out (working copy) @@ -46,11 +46,10 @@ Table Type: EXTERNAL_TABLE Table Parameters: EXTERNAL TRUE - numFiles 1 - numPartitions 0 + numFiles 0 numRows 6 rawDataSize 6 - totalSize 11 + totalSize 0 #### A masked pattern was here #### # Storage Information @@ -88,11 +87,10 @@ Table Type: EXTERNAL_TABLE Table Parameters: EXTERNAL TRUE - numFiles 1 - numPartitions 0 + numFiles 0 numRows 0 rawDataSize 0 - totalSize 11 + totalSize 0 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/partition_date2.q.out =================================================================== --- ql/src/test/results/clientpositive/partition_date2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/partition_date2.q.out (working copy) @@ -94,10 +94,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value 
SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from partition_date2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_date2_1 @@ -116,10 +116,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 238 val_238 1999-01-01 2 238 val_238 2000-01-01 1 changed_key changed_value 2000-01-01 2 @@ -136,10 +136,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: 
Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select distinct dt from partition_date2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_date2_1 @@ -158,10 +158,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: 
partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 1999-01-01 2000-01-01 PREHOOK: query: select * from partition_date2_1 @@ -182,10 +182,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 238 val_238 1999-01-01 2 238 val_238 2000-01-01 1 PREHOOK: query: -- alter table add partition @@ -201,10 +201,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select distinct dt from partition_date2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_date2_1 @@ -225,10 +225,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 1980-01-02 1999-01-01 2000-01-01 @@ -252,10 +252,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 238 val_238 1999-01-01 2 238 val_238 2000-01-01 1 PREHOOK: query: -- alter table drop @@ -272,10 +272,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select distinct dt from partition_date2_1 PREHOOK: type: QUERY PREHOOK: Input: default@partition_date2_1 @@ -294,10 +294,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, 
comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 1980-01-02 2000-01-01 PREHOOK: query: select * from partition_date2_1 @@ -318,10 +318,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 238 val_238 2000-01-01 1 PREHOOK: query: -- alter table set serde alter table partition_date2_1 partition(dt=date '1980-01-02', region=3) @@ -340,10 +340,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 
PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: -- alter table set fileformat alter table partition_date2_1 partition(dt=date '1980-01-02', region=3) set fileformat rcfile @@ -361,10 +361,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: 
partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: describe extended partition_date2_1 partition(dt=date '1980-01-02', region=3) PREHOOK: type: DESCTABLE POSTHOOK: query: describe extended partition_date2_1 partition(dt=date '1980-01-02', region=3) @@ -373,10 +373,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] key string None value string None dt date None @@ -405,10 +405,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 
PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: select * from partition_date2_1 order by key,value,dt,region PREHOOK: type: QUERY PREHOOK: Input: default@partition_date2_1 @@ -429,10 +429,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 
PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] 238 val_238 1980-01-02 3 238 val_238 2000-01-01 1 86 val_86 1980-01-02 3 @@ -455,10 +455,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: describe extended partition_date2_1 partition(dt=date '1980-01-02', region=3) PREHOOK: type: DESCTABLE POSTHOOK: query: describe extended partition_date2_1 partition(dt=date '1980-01-02', region=3) @@ -469,10 +469,10 @@ POSTHOOK: 
Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] key string None value string None dt date None @@ -502,10 +502,10 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: drop table partition_date2_1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@partition_date2_1 @@ -520,7 +520,7 @@ POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=1999-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [] POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: partition_date2_1 PARTITION(dt=2000-01-01,region=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out =================================================================== --- 
ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out (working copy) @@ -108,7 +108,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -129,7 +128,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -252,7 +250,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -273,7 +270,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -427,7 +423,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -448,7 +443,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -486,7 +480,6 @@ #### A masked pattern was here #### name default.bucket_small numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} @@ -604,7 +597,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -625,7 +617,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -663,7 +654,6 @@ #### A masked pattern was here #### 
name default.bucket_small numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} @@ -758,7 +748,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -779,7 +768,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy) @@ -88,7 +88,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -107,7 +106,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -151,15 +149,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -197,15 +190,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -396,7 +384,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -415,7 +402,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -459,15 +445,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -505,15 +486,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -551,15 +527,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -597,15 
+568,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -807,7 +773,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -826,7 +791,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -870,15 +834,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -916,15 +875,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -962,15 +916,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl 
struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1008,15 +957,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1211,7 +1155,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1230,7 +1173,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1274,15 +1216,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1320,15 +1257,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/udf_java_method.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_java_method.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/udf_java_method.q.out (working copy) @@ -103,7 +103,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -122,7 +121,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/sample5.q.out =================================================================== --- ql/src/test/results/clientpositive/sample5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample5.q.out (working copy) @@ -86,7 +86,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -106,7 +105,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/infer_bucket_sort_reducers_power_two.q.out =================================================================== --- ql/src/test/results/clientpositive/infer_bucket_sort_reducers_power_two.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/infer_bucket_sort_reducers_power_two.q.out (working copy) @@ -77,18 +77,18 @@ POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Output: default@test_table@part=1 +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: 
test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') POSTHOOK: type: DESCTABLE +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -137,20 +137,20 @@ POSTHOOK: Output: default@test_table@part=1 POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key 
SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -199,24 +199,24 @@ POSTHOOK: Output: default@test_table@part=1 POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] 
+POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -267,12 +267,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -281,12 +281,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -341,12 +341,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -357,12 +357,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None Index: ql/src/test/results/clientpositive/bucketcontext_8.q.out =================================================================== --- 
ql/src/test/results/clientpositive/bucketcontext_8.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketcontext_8.q.out (working copy) @@ -139,15 +139,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -185,15 +180,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -292,15 +282,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -340,15 +325,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -495,15 +475,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -543,15 +518,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/udf_explode.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_explode.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/udf_explode.q.out (working copy) @@ -70,7 +70,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -89,7 +88,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -157,7 +155,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -176,7 +173,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 
rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -407,7 +403,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -426,7 +421,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -496,7 +490,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -515,7 +508,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2.q.out (working copy) @@ -130,12 +130,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -242,12 +236,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -362,12 +350,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage 
Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -490,12 +472,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -626,12 +602,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -770,12 +740,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -922,12 +886,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1082,12 +1040,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/stats14.q.out =================================================================== --- ql/src/test/results/clientpositive/stats14.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats14.q.out (working copy) @@ -43,7 +43,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 
totalSize 5812 @@ -175,11 +174,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information @@ -345,11 +339,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out (working copy) @@ -157,15 +157,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -208,15 +203,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_medium - numFiles 3 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 170 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium @@ -259,15 +249,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_medium - numFiles 3 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl 
struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 170 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium @@ -408,15 +393,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -456,15 +436,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -504,15 +479,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_medium - numFiles 3 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 170 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium @@ -552,15 +522,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small Index: ql/src/test/results/clientpositive/rand_partitionpruner2.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy) @@ -112,15 +112,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -158,15 +153,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketcontext_3.q.out (working copy) @@ -114,15 +114,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct 
bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -160,15 +155,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -267,15 +257,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -419,15 +404,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucket3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucket3.q.out (working copy) @@ -60,7 
+60,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -79,7 +78,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (working copy) @@ -89,15 +89,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -135,15 +130,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out =================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out (working copy) @@ -114,15 +114,10 @@ columns.types int:string #### A masked 
pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -221,15 +216,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/bucketmapjoin8.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin8.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin8.q.out (working copy) @@ -108,15 +108,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -214,15 +209,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 
serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -357,15 +347,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -463,15 +448,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/bucketmapjoin12.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin12.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin12.q.out (working copy) @@ -135,15 +135,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -241,15 +236,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -375,15 +365,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_3 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_3 @@ -473,15 +458,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/stats6.q.out =================================================================== --- ql/src/test/results/clientpositive/stats6.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/stats6.q.out (working copy) @@ -195,6 +195,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -238,6 +240,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -281,11 +285,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -144,15 +144,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -271,15 +266,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ 
-688,15 +678,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -768,7 +753,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -820,15 +804,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -860,7 +839,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -897,7 +875,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -925,7 +902,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, 
string value2} @@ -944,7 +920,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -979,7 +954,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1007,7 +981,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1026,7 +999,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/alter_skewed_table.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_skewed_table.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/alter_skewed_table.q.out (working copy) @@ -58,6 +58,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -139,6 +142,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -222,6 +228,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe 
Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/stats1.q.out =================================================================== --- ql/src/test/results/clientpositive/stats1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats1.q.out (working copy) @@ -206,7 +206,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 2 - numPartitions 0 numRows 26 rawDataSize 199 totalSize 225 @@ -256,7 +255,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 1583 Index: ql/src/test/results/clientpositive/smb_mapjoin_12.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_12.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/smb_mapjoin_12.q.out (working copy) @@ -176,15 +176,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table1 - numFiles 16 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 @@ -352,15 +347,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table3 - numFiles 16 - numPartitions 1 - numRows 3084 partition_columns ds - rawDataSize 32904 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 35988 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -405,15 +395,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table3 - numFiles 16 - numPartitions 1 - numRows 3084 
partition_columns ds - rawDataSize 32904 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 35988 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -439,15 +424,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table3 - numFiles 16 - numPartitions 1 - numRows 3084 partition_columns ds - rawDataSize 32904 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 35988 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_7.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_7.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_7.q.out (working copy) @@ -336,10 +336,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, 
comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -354,10 +354,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION 
[(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0 1 0 val_0val_0 1 0 val_0val_0 1 @@ -381,10 +381,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 val_5val_5 1 5 val_5val_5 1 5 val_5val_5 1 @@ -420,10 +420,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE 
[(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '1') (< (TOK_TABLE_OR_COL key) 8))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '1') (< (TOK_TABLE_OR_COL key) 8))))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test_table3) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (TOK_FUNCTION concat (. 
(TOK_TABLE_OR_COL a) value) (. (TOK_TABLE_OR_COL b) value)))) (TOK_WHERE (or (= (. (TOK_TABLE_OR_COL a) key) 0) (= (. (TOK_TABLE_OR_COL a) key) 5))))) @@ -526,12 +526,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds 
= '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -546,12 +546,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0 1 0 val_0val_0 1 0 val_0val_0 1 @@ -575,12 +575,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, 
comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 val_5val_5 1 5 val_5val_5 1 5 val_5val_5 1 Index: ql/src/test/results/clientpositive/join32_lessSize.q.out =================================================================== --- ql/src/test/results/clientpositive/join32_lessSize.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/join32_lessSize.q.out (working copy) @@ -110,7 +110,6 @@ #### A 
masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -129,7 +128,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -152,7 +150,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -171,7 +168,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -224,15 +220,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -361,15 +352,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -632,7 +618,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -651,7 +636,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct 
src { string key, string value} @@ -674,7 +658,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -693,7 +676,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -797,7 +779,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -847,7 +828,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -866,7 +846,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -889,7 +868,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -908,7 +886,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -937,7 +914,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -1040,7 +1016,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -1090,7 +1065,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1109,7 +1083,6 @@ #### A masked pattern was 
here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1132,7 +1105,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1151,7 +1123,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1252,7 +1223,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -1302,7 +1272,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1321,7 +1290,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1344,7 +1312,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1363,7 +1330,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1462,7 +1428,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1481,7 +1446,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1504,7 +1468,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { 
string key, string value} @@ -1523,7 +1486,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1575,7 +1537,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -1824,7 +1785,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1843,7 +1803,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1866,7 +1825,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1885,7 +1843,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1938,15 +1895,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2075,15 +2027,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2363,7 +2310,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -2382,7 +2328,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -2405,7 +2350,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -2424,7 +2368,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -2477,15 +2420,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2548,7 +2486,6 @@ #### A masked pattern was here #### name default.dest_j2 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j2 { string key, string value, string val2} @@ -2619,15 +2556,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2650,7 +2582,6 @@ #### A masked pattern was here #### name default.dest_j2 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j2 { string key, string value, string val2} @@ -2666,7 +2597,6 @@ Stats-Aggr Operator #### A masked pattern was here #### - PREHOOK: query: INSERT OVERWRITE TABLE dest_j2 SELECT res.key, z.value, res.value FROM (select x.key, x.value from src1 x LEFT OUTER JOIN src y ON (x.key = y.key)) res Index: ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out (working copy) @@ -169,15 +169,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -217,15 +212,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -372,15 +362,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 
serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -420,15 +405,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -554,15 +534,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -600,15 +575,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -699,15 +669,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string 
value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -747,15 +712,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -794,15 +754,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -841,15 +796,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -932,15 +882,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -979,15 +924,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1078,15 +1018,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1126,15 +1061,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1173,15 +1103,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1220,15 +1145,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1339,15 +1259,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1387,15 +1302,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy) @@ -80,7 +80,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 
0 serialization.ddl struct src { string key, string value} @@ -99,7 +98,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -143,15 +141,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -189,15 +182,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -235,15 +223,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -281,15 +264,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -481,7 +459,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -500,7 +477,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -544,15 +520,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -590,15 +561,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -636,15 +602,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -682,15 
+643,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/list_bucket_dml_10.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_10.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/list_bucket_dml_10.q.out (working copy) @@ -117,7 +117,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -136,7 +135,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_2.q.out (working copy) @@ -984,10 +984,10 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 
+POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -1010,10 +1010,10 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, 
type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0 1 0 val_0val_0 1 0 val_0val_0 1 @@ -1048,10 +1048,10 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: 
Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 val_5val_5 1 5 val_5val_5 1 5 val_5val_5 1 @@ -1094,10 +1094,10 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY 
(TOK_FROM (TOK_TABREF (TOK_TABNAME test_table1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_TABLE_OR_COL value) (TOK_TABLE_OR_COL value)) v1)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_TABLE_OR_COL value) (TOK_TABLE_OR_COL value)) v2)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test_table3) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL a) v1) (. (TOK_TABLE_OR_COL b) v2)))))) @@ -1202,12 +1202,12 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION 
[(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -1230,12 +1230,12 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] 
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0val_0val_0 1 0 val_0val_0val_0val_0 1 0 val_0val_0val_0val_0 1 @@ -1270,12 +1270,12 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: 
Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 val_5val_5val_5val_5 1 5 val_5val_5val_5val_5 1 5 val_5val_5val_5val_5 1 @@ -1318,12 +1318,12 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: 
test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test_table3) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR (+ (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL a) key))) (TOK_SELEXPR (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL a) value) (. 
(TOK_TABLE_OR_COL b) value)))))) @@ -1624,14 +1624,14 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key EXPRESSION [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: 
test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -1654,14 +1654,14 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), 
(test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key EXPRESSION [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0 1 0 val_0val_0 1 0 val_0val_0 1 @@ -1706,11 +1706,11 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), 
(test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key EXPRESSION [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/udf_reflect.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_reflect.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/udf_reflect.q.out (working copy) @@ -103,7 +103,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -122,7 +121,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (working copy) @@ -117,15 +117,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -162,15 +157,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -268,7 +258,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -288,7 +277,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} Index: ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out (working copy) @@ -144,15 +144,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 
partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -192,15 +187,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -326,15 +316,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -425,15 +410,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -473,15 +453,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct 
bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -520,15 +495,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -611,15 +581,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -658,15 +623,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -757,15 +717,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -805,15 +760,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -852,15 +802,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -970,15 +915,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1018,15 +958,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/sample7.q.out =================================================================== --- ql/src/test/results/clientpositive/sample7.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample7.q.out (working copy) @@ -88,7 +88,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -108,7 +107,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy) @@ -109,15 +109,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -155,15 +150,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -201,15 +191,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -247,15 +232,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/regexp_extract.q.out =================================================================== --- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy) @@ -89,7 +89,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -108,7 +107,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -352,7 +350,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -371,7 +368,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 
serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucket_map_join_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_map_join_2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucket_map_join_2.q.out (working copy) @@ -126,7 +126,6 @@ #### A masked pattern was here #### name default.table1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct table1 { string key, string value} @@ -147,7 +146,6 @@ #### A masked pattern was here #### name default.table1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct table1 { string key, string value} Index: ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- ql/src/test/results/clientpositive/sample2.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -88,7 +88,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -108,7 +107,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/stats16.q.out =================================================================== --- ql/src/test/results/clientpositive/stats16.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats16.q.out (working copy) @@ -76,7 +76,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 Index: ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out =================================================================== --- ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out 
(revision 1535192) +++ ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (working copy) @@ -60,7 +60,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -79,7 +78,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/ppd_union_view.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_union_view.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/ppd_union_view.q.out (working copy) @@ -275,15 +275,10 @@ columns.types string:string #### A masked pattern was here #### name default.t1_mapping - numFiles 2 - numPartitions 2 - numRows 2 partition_columns ds - rawDataSize 24 serialization.ddl struct t1_mapping { string key, string keymap} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_mapping @@ -320,15 +315,10 @@ columns.types string:string #### A masked pattern was here #### name default.t1_old - numFiles 2 - numPartitions 2 - numRows 2 partition_columns ds - rawDataSize 28 serialization.ddl struct t1_old { string keymap, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_old @@ -803,15 +793,10 @@ columns.types string:string #### A masked pattern was here #### name default.t1_new - numFiles 2 - numPartitions 2 - numRows 2 partition_columns ds - rawDataSize 22 serialization.ddl struct t1_new { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 24 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_new Index: ql/src/test/results/clientpositive/ctas_colname.q.out =================================================================== --- ql/src/test/results/clientpositive/ctas_colname.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/ctas_colname.q.out (working copy) @@ -126,7 +126,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 1 rawDataSize 25 totalSize 26 @@ -259,7 +258,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 25 rawDataSize 242 totalSize 267 @@ -442,7 +440,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 20 rawDataSize 268 totalSize 288 @@ -596,7 +593,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 1 rawDataSize 5 totalSize 6 @@ -731,7 +727,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 309 rawDataSize 864 totalSize 1173 @@ -1178,7 +1173,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 5 rawDataSize 5 totalSize 10 @@ -1337,7 +1331,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 5 rawDataSize 35 totalSize 40 Index: ql/src/test/results/clientpositive/truncate_column.q.out =================================================================== --- ql/src/test/results/clientpositive/truncate_column.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/truncate_column.q.out (working copy) @@ -39,7 +39,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 185 @@ -107,7 +106,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 150 @@ -193,7 +191,6 @@ Table Type: MANAGED_TABLE Table 
Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 75 @@ -269,7 +266,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 75 @@ -362,7 +358,6 @@ Table Parameters: #### A masked pattern was here #### numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 185 @@ -443,7 +438,6 @@ Table Parameters: #### A masked pattern was here #### numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 150 @@ -526,7 +520,6 @@ Table Parameters: #### A masked pattern was here #### numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 75 Index: ql/src/test/results/clientpositive/bucketcontext_5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucketcontext_5.q.out (working copy) @@ -138,7 +138,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -159,7 +158,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -282,7 +280,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -303,7 +300,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} Index: ql/src/test/results/clientpositive/describe_comment_nonascii.q.out =================================================================== --- ql/src/test/results/clientpositive/describe_comment_nonascii.q.out (revision 1535192) +++ 
ql/src/test/results/clientpositive/describe_comment_nonascii.q.out (working copy) @@ -50,6 +50,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/stats11.q.out =================================================================== --- ql/src/test/results/clientpositive/stats11.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/stats11.q.out (working copy) @@ -331,15 +331,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -437,7 +432,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -457,7 +451,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -902,7 +895,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -954,15 +946,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct 
srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -994,7 +981,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1031,7 +1017,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1059,7 +1044,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1078,7 +1062,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1113,7 +1096,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1141,7 +1123,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1160,7 +1141,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string 
value1, string value2} Index: ql/src/test/results/clientpositive/bucket5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket5.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/bucket5.q.out (working copy) @@ -108,7 +108,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -127,7 +126,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -502,7 +500,6 @@ Table Parameters: SORTBUCKETCOLSPREFIX TRUE numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 5812 Index: ql/src/test/results/clientpositive/input23.q.out =================================================================== --- ql/src/test/results/clientpositive/input23.q.out (revision 1535192) +++ ql/src/test/results/clientpositive/input23.q.out (working copy) @@ -88,15 +88,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/compiler/plan/join2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join2.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/join2.q.xml (working copy) @@ -268,10 +268,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1102,10 +1098,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1178,10 +1170,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1847,10 +1835,6 @@ 0 - 
numPartitions - 0 - - bucket_count -1 @@ -1983,10 +1967,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2728,10 +2708,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2804,10 +2780,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input2.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input2.q.xml (working copy) @@ -1685,10 +1685,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2843,10 +2839,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2919,10 +2911,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join3.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/join3.q.xml (working copy) @@ -257,10 +257,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -393,10 +389,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -529,10 +521,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1638,10 +1626,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1714,10 +1698,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input3.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input3.q.xml (working copy) @@ -2071,10 +2071,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -3524,10 +3520,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -3600,10 +3592,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join4.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/join4.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - 
numPartitions - 0 - - bucket_count -1 @@ -246,10 +242,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1601,10 +1593,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1677,10 +1665,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input4.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input4.q.xml (working copy) @@ -257,10 +257,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1091,10 +1087,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1167,10 +1159,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join5.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/join5.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -246,10 +242,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1601,10 +1593,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1677,10 +1665,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input5.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input5.q.xml (working copy) @@ -261,10 +261,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1133,10 +1129,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1213,10 +1205,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex Index: ql/src/test/results/compiler/plan/join6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join6.q.xml (revision 1535192) +++ 
ql/src/test/results/compiler/plan/join6.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -246,10 +242,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1601,10 +1593,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1677,10 +1665,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input_testxpath2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath2.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input_testxpath2.q.xml (working copy) @@ -114,10 +114,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -975,10 +971,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1055,10 +1047,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex Index: ql/src/test/results/compiler/plan/input6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input6.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input6.q.xml (working copy) @@ -636,10 +636,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1160,10 +1156,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1236,10 +1228,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join7.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/join7.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -246,10 +242,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -382,10 +374,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2359,10 +2347,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2435,10 +2419,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: 
ql/src/test/results/compiler/plan/input7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input7.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input7.q.xml (working copy) @@ -636,10 +636,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1066,10 +1062,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1142,10 +1134,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input8.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input8.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -738,10 +734,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -814,10 +806,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join8.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/join8.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -246,10 +242,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1683,10 +1675,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1759,10 +1747,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input_testsequencefile.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (working copy) @@ -636,10 +636,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1077,10 +1073,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1153,10 +1145,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/union.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/union.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/union.q.xml (working copy) @@ -526,10 +526,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -662,10 +658,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1771,10 +1763,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1847,10 +1835,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input9.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input9.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input9.q.xml (working copy) @@ -636,10 +636,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1139,10 +1135,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1215,10 +1207,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/udf1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf1.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2035,10 +2031,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2111,10 +2103,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input_testxpath.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input_testxpath.q.xml (working copy) @@ -114,10 +114,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -877,10 +873,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -957,10 +949,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex Index: 
ql/src/test/results/compiler/plan/udf6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf6.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/udf6.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -658,10 +654,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -734,10 +726,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input_part1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_part1.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input_part1.q.xml (working copy) @@ -115,10 +115,6 @@ default.srcpart - numFiles - 4 - - columns.types string:string @@ -135,18 +131,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 4 - - partition_columns ds/hr @@ -163,10 +147,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 23248 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -963,10 +943,6 @@ default.srcpart - numFiles - 4 - - columns.types string:string @@ -983,18 +959,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 4 - - partition_columns ds/hr @@ -1011,10 +975,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 23248 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/compiler/plan/groupby1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby1.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/groupby1.q.xml (working copy) @@ -257,10 +257,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1077,10 +1073,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1153,10 +1145,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/udf_case.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/udf_case.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/udf_case.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -762,10 +758,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -838,10 +830,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby2.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/groupby2.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1199,10 +1195,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1275,10 +1267,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/subq.q.xml =================================================================== --- ql/src/test/results/compiler/plan/subq.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/subq.q.xml (working copy) @@ -526,10 +526,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1061,10 +1057,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1137,10 +1129,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby3.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/groupby3.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1414,10 +1410,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1490,10 +1482,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby4.q.xml (revision 1535192) +++ 
ql/src/test/results/compiler/plan/groupby4.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -821,10 +817,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -897,10 +889,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby5.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/groupby5.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -927,10 +923,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1003,10 +995,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby6.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/groupby6.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -821,10 +817,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -897,10 +889,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/case_sensitivity.q.xml =================================================================== --- ql/src/test/results/compiler/plan/case_sensitivity.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/case_sensitivity.q.xml (working copy) @@ -640,10 +640,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1371,10 +1367,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1451,10 +1443,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex Index: ql/src/test/results/compiler/plan/udf_when.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf_when.q.xml (revision 1535192) 
+++ ql/src/test/results/compiler/plan/udf_when.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -842,10 +838,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -918,10 +910,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input20.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input20.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input20.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -935,10 +931,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1011,10 +1003,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/sample1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample1.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy) @@ -115,10 +115,6 @@ default.srcpart - numFiles - 4 - - columns.types string:string @@ -135,18 +131,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 4 - - partition_columns ds/hr @@ -163,10 +147,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 23248 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1062,10 +1042,6 @@ default.srcpart - numFiles - 4 - - columns.types string:string @@ -1082,18 +1058,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 4 - - partition_columns ds/hr @@ -1110,10 +1074,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 23248 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/compiler/plan/sample2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample2.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/sample2.q.xml (working copy) @@ -644,10 +644,6 @@ 0 - 
numPartitions - 0 - - bucket_count 2 @@ -1277,10 +1273,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1357,10 +1349,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/sample3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample3.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/sample3.q.xml (working copy) @@ -644,10 +644,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1287,10 +1283,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1367,10 +1359,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/sample4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample4.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/sample4.q.xml (working copy) @@ -644,10 +644,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1277,10 +1273,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1357,10 +1349,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/sample5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample5.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/sample5.q.xml (working copy) @@ -644,10 +644,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1274,10 +1270,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1354,10 +1346,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/sample6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample6.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/sample6.q.xml (working copy) @@ -644,10 +644,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1277,10 +1273,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1357,10 +1349,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: 
ql/src/test/results/compiler/plan/sample7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample7.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/sample7.q.xml (working copy) @@ -644,10 +644,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1322,10 +1318,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1402,10 +1394,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/cast1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/cast1.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy) @@ -110,10 +110,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1176,10 +1172,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1252,10 +1244,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join1.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/join1.q.xml (working copy) @@ -257,10 +257,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -393,10 +389,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1185,10 +1177,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1261,10 +1249,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input1.q.xml (revision 1535192) +++ ql/src/test/results/compiler/plan/input1.q.xml (working copy) @@ -636,10 +636,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1174,10 +1170,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1250,10 +1242,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestStatsPublisherEnhanced.java 
=================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestStatsPublisherEnhanced.java (revision 1535192) +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestStatsPublisherEnhanced.java (working copy) @@ -24,11 +24,11 @@ import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.stats.StatsAggregator; import org.apache.hadoop.hive.ql.stats.StatsFactory; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.mapred.JobConf; /** Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java (working copy) @@ -28,10 +28,10 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.exec.Utilities; /** * Conditional task resolution interface. 
This is invoked at run time to get the @@ -231,7 +231,7 @@ throws IOException { DynamicPartitionCtx dpCtx = ctx.getDPCtx(); // get list of dynamic partitions - FileStatus[] status = Utilities.getFileStatusRecurse(dirPath, dpLbLevel, inpFs); + FileStatus[] status = HiveStatsUtils.getFileStatusRecurse(dirPath, dpLbLevel, inpFs); // cleanup pathToPartitionInfo Map ptpi = work.getPathToPartitionInfo(); Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (working copy) @@ -48,6 +48,8 @@ import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.HiveStatsUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaException; @@ -1365,7 +1367,7 @@ new ArrayList>(); FileSystem fs = loadPath.getFileSystem(conf); - FileStatus[] leafStatus = Utilities.getFileStatusRecurse(loadPath, numDP+1, fs); + FileStatus[] leafStatus = HiveStatsUtils.getFileStatusRecurse(loadPath, numDP+1, fs); // Check for empty partitions for (FileStatus s : leafStatus) { // Check if the hadoop version supports sub-directories for tables/partitions @@ -1558,6 +1560,17 @@ return getPartition(tbl, partSpec, forceCreate, null, true); } + private static void clearPartitionStats(org.apache.hadoop.hive.metastore.api.Partition tpart) { + Map tpartParams = tpart.getParameters(); + if (tpartParams == null) { + return; + } + List statTypes = StatsSetupConst.getSupportedStats(); + for (String statType : statTypes) { + tpartParams.remove(statType); + } + } + /** * Returns partition metadata * @@ -1627,6 +1640,7 @@ throw new HiveException("new partition path 
should not be null or empty."); } tpart.getSd().setLocation(partPath); + clearPartitionStats(tpart); String fullName = tbl.getTableName(); if (!org.apache.commons.lang.StringUtils.isEmpty(tbl.getDbName())) { fullName = tbl.getDbName() + "." + tbl.getTableName(); @@ -2565,4 +2579,5 @@ private static String[] getQualifiedNames(String qualifiedName) { return qualifiedName.split("\\."); } + }; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (working copy) @@ -34,6 +34,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.HiveStatsUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.io.FSRecordWriter; import org.apache.hadoop.hive.ql.io.FSRecordWriter.StatsProvidingRecordWriter; @@ -51,7 +53,6 @@ import org.apache.hadoop.hive.ql.plan.SkewedColumnPositionPair; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; import org.apache.hadoop.hive.serde2.Serializer; @@ -836,7 +837,7 @@ if (conf.isLinkedFileSink()) { level++; } - FileStatus[] status = Utilities.getFileStatusRecurse(tmpPath, level, fs); + FileStatus[] status = HiveStatsUtils.getFileStatusRecurse(tmpPath, level, fs); sb.append("Sample of ") .append(Math.min(status.length, 100)) .append(" partitions created under ") Index: ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java =================================================================== --- 
ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (working copy) @@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; @@ -35,7 +36,6 @@ import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (working copy) @@ -19,23 +19,19 @@ package org.apache.hadoop.hive.ql.exec; -import java.io.FileNotFoundException; -import java.io.IOException; import java.io.Serializable; -import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import 
org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.ql.DriverContext; @@ -43,7 +39,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Partition; import org.apache.hadoop.hive.ql.metadata.Table; -import org.apache.hadoop.hive.ql.optimizer.listbucketingpruner.ListBucketingPrunerUtils; import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.LoadTableDesc; @@ -52,12 +47,16 @@ import org.apache.hadoop.hive.ql.stats.StatsAggregator; import org.apache.hadoop.hive.ql.stats.StatsFactory; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.util.StringUtils; /** - * StatsTask implementation. + * StatsTask implementation. StatsTask mainly deals with "collectable" stats. These are + * stats that require data scanning and are collected during query execution (unless the user + * explicitly requests data scanning just for the purpose of stats computation using the "ANALYZE" + * command. All other stats are computed directly by the MetaStore. The rationale being that the + * MetaStore layer covers all Thrift calls and provides better guarantees about the accuracy of + * those stats. 
**/ public class StatsTask extends Task implements Serializable { @@ -67,26 +66,9 @@ private Table table; private List> dpPartSpecs; - private static final List supportedStats = new ArrayList(); - private static final List collectableStats = new ArrayList(); - private static final Map nameMapping = new HashMap(); - static { - // supported statistics - supportedStats.add(StatsSetupConst.NUM_FILES); - supportedStats.add(StatsSetupConst.ROW_COUNT); - supportedStats.add(StatsSetupConst.TOTAL_SIZE); - supportedStats.add(StatsSetupConst.RAW_DATA_SIZE); + private static final List collectableStats = StatsSetupConst.getStatsToBeCollected(); + private static final List supportedStats = StatsSetupConst.getSupportedStats(); - // statistics that need to be collected throughout the execution - collectableStats.add(StatsSetupConst.ROW_COUNT); - collectableStats.add(StatsSetupConst.RAW_DATA_SIZE); - - nameMapping.put(StatsSetupConst.NUM_FILES, "num_files"); - nameMapping.put(StatsSetupConst.ROW_COUNT, "num_rows"); - nameMapping.put(StatsSetupConst.TOTAL_SIZE, "total_size"); - nameMapping.put(StatsSetupConst.RAW_DATA_SIZE, "raw_data_size"); - } - public StatsTask() { super(); dpPartSpecs = null; @@ -94,20 +76,20 @@ /** * - * Partition Level Statistics. + * Statistics for a Partition or Unpartitioned Table * */ - class PartitionStatistics { + class Statistics { Map stats; - public PartitionStatistics() { + public Statistics() { stats = new HashMap(); for (String statType : supportedStats) { stats.put(statType, new LongWritable(0L)); } } - public PartitionStatistics(Map st) { + public Statistics(Map st) { stats = new HashMap(); for (String statType : st.keySet()) { Long stValue = st.get(statType) == null ? 
0L : st.get(statType); @@ -126,89 +108,10 @@ @Override public String toString() { - StringBuilder sb = new StringBuilder(); - for (String statType : supportedStats) { - sb.append(nameMapping.get(statType)).append(": ").append(stats.get(statType)).append(", "); - } - sb.delete(sb.length() - 2, sb.length()); - return sb.toString(); + return org.apache.commons.lang.StringUtils.join(supportedStats, ", "); } } - /** - * Table Level Statistics. - */ - class TableStatistics extends PartitionStatistics { - int numPartitions; // number of partitions - - public TableStatistics() { - super(); - numPartitions = 0; - } - - public void setNumPartitions(int np) { - numPartitions = np; - } - - public int getNumPartitions() { - return numPartitions; - } - - /** - * Incrementally update the table statistics according to the old and new - * partition level statistics. - * - * @param oldStats - * The old statistics of a partition. - * @param newStats - * The new statistics of a partition. - */ - public void updateStats(PartitionStatistics oldStats, PartitionStatistics newStats) { - deletePartitionStats(oldStats); - addPartitionStats(newStats); - } - - /** - * Update the table level statistics when a new partition is added. - * - * @param newStats - * the new partition statistics. - */ - public void addPartitionStats(PartitionStatistics newStats) { - for (String statType : supportedStats) { - LongWritable value = stats.get(statType); - if (value == null) { - stats.put(statType, new LongWritable(newStats.getStat(statType))); - } else { - value.set(value.get() + newStats.getStat(statType)); - } - } - this.numPartitions++; - } - - /** - * Update the table level statistics when an old partition is dropped. - * - * @param oldStats - * the old partition statistics. 
- */ - public void deletePartitionStats(PartitionStatistics oldStats) { - for (String statType : supportedStats) { - LongWritable value = stats.get(statType); - value.set(value.get() - oldStats.getStat(statType)); - } - this.numPartitions--; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("num_partitions: ").append(numPartitions).append(", "); - sb.append(super.toString()); - return sb.toString(); - } - } - @Override protected void receiveFeed(FeedType feedType, Object feedValue) { // this method should be called by MoveTask when there are dynamic partitions generated @@ -297,7 +200,7 @@ } } - TableStatistics tblStats = new TableStatistics(); + Statistics tblStats = new Statistics(); org.apache.hadoop.hive.metastore.api.Table tTable = table.getTTable(); Map parameters = tTable.getParameters(); @@ -310,10 +213,6 @@ } } - if (parameters.containsKey(StatsSetupConst.NUM_PARTITIONS)) { - tblStats.setNumPartitions(Integer.parseInt(parameters.get(StatsSetupConst.NUM_PARTITIONS))); - } - List partitions = getPartitionsList(); boolean atomic = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_ATOMIC); int maxPrefixLength = HiveConf.getIntVar(conf, @@ -324,10 +223,6 @@ if (!tableStatsExist && atomic) { return 0; } - long[] summary = summary(conf, table); - tblStats.setStat(StatsSetupConst.NUM_FILES, summary[0]); - tblStats.setStat(StatsSetupConst.TOTAL_SIZE, summary[1]); - // In case of a non-partitioned table, the key for stats temporary store is "rootDir" if (statsAggregator != null) { String aggKey = Utilities.getHashedStatsPrefix(work.getAggKey(), maxPrefixLength); @@ -344,6 +239,19 @@ } } } + + // write table stats to metastore + parameters = tTable.getParameters(); + for (String statType : collectableStats) { + parameters.put(statType, Long.toString(tblStats.getStat(statType))); + } + tTable.setParameters(parameters); + + String tableFullName = table.getDbName() + "." 
+ table.getTableName(); + + db.alterTable(tableFullName, new Table(tTable)); + + console.printInfo("Table " + tableFullName + " stats: [" + tblStats.toString() + ']'); } else { // Partitioned table: // Need to get the old stats of the partition @@ -370,7 +278,7 @@ // // get the new partition stats // - PartitionStatistics newPartStats = new PartitionStatistics(); + Statistics newPartStats = new Statistics(); // In that case of a partition, the key for stats temporary store is // "rootDir/[dynamic_partition_specs/]%" @@ -398,16 +306,16 @@ } } - long[] summary = summary(conf, partn); - newPartStats.setStat(StatsSetupConst.NUM_FILES, summary[0]); - newPartStats.setStat(StatsSetupConst.TOTAL_SIZE, summary[1]); - - if (hasStats) { - PartitionStatistics oldPartStats = new PartitionStatistics(currentValues); - tblStats.updateStats(oldPartStats, newPartStats); - } else { - tblStats.addPartitionStats(newPartStats); + /** + * calculate fast statistics + */ + FileStatus[] partfileStatus = wh.getFileStatusesForPartition(tPart); + newPartStats.setStat(StatsSetupConst.NUM_FILES, partfileStatus.length); + long partSize = 0L; + for (int i = 0; i < partfileStatus.length; i++) { + partSize += partfileStatus[i].getLen(); } + newPartStats.setStat(StatsSetupConst.TOTAL_SIZE, partSize); // // update the metastore @@ -429,22 +337,6 @@ } - // - // write table stats to metastore - // - parameters = tTable.getParameters(); - for (String statType : supportedStats) { - parameters.put(statType, Long.toString(tblStats.getStat(statType))); - } - parameters.put(StatsSetupConst.NUM_PARTITIONS, Integer.toString(tblStats.getNumPartitions())); - tTable.setParameters(parameters); - - String tableFullName = table.getDbName() + "." 
+ table.getTableName(); - - db.alterTable(tableFullName, new Table(tTable)); - - console.printInfo("Table " + tableFullName + " stats: [" + tblStats.toString() + ']'); - } catch (Exception e) { console.printInfo("[Warning] could not update stats.", "Failed with exception " + e.getMessage() + "\n" @@ -464,105 +356,6 @@ return ret; } - private long[] summary(HiveConf conf, Partition partn) throws IOException { - Path path = partn.getPartitionPath(); - FileSystem fs = path.getFileSystem(conf); - List skewedColNames = partn.getSkewedColNames(); - if (skewedColNames == null || skewedColNames.isEmpty()) { - return summary(fs, path); - } - List> skewColValues = table.getSkewedColValues(); - if (skewColValues == null || skewColValues.isEmpty()) { - return summary(fs, toDefaultLBPath(path)); - } - return summary(fs, path, skewedColNames); - } - - private long[] summary(HiveConf conf, Table table) throws IOException { - Path path = table.getPath(); - FileSystem fs = path.getFileSystem(conf); - List skewedColNames = table.getSkewedColNames(); - if (skewedColNames == null || skewedColNames.isEmpty()) { - return summary(fs, path); - } - List> skewColValues = table.getSkewedColValues(); - if (skewColValues == null || skewColValues.isEmpty()) { - return summary(fs, toDefaultLBPath(path)); - } - return summary(fs, path, table.getSkewedColNames()); - } - - private Path toDefaultLBPath(Path path) { - return new Path(path, ListBucketingPrunerUtils.HIVE_LIST_BUCKETING_DEFAULT_DIR_NAME); - } - - private long[] summary(FileSystem fs, Path path) throws IOException { - try { - FileStatus status = fs.getFileStatus(path); - if (!status.isDir()) { - return new long[] {1, status.getLen()}; - } - } catch (FileNotFoundException e) { - return new long[] {0, 0}; - } - FileStatus[] children = fs.listStatus(path); // can be null - if (children == null) { - return new long[] {0, 0}; - } - long numFiles = 0L; - long tableSize = 0L; - for (FileStatus child : children) { - if (!child.isDir()) { - 
tableSize += child.getLen(); - numFiles++; - } - } - return new long[] {numFiles, tableSize}; - } - - private Pattern toPattern(List skewCols) { - StringBuilder builder = new StringBuilder(); - for (String skewCol : skewCols) { - if (builder.length() > 0) { - builder.append(Path.SEPARATOR_CHAR); - } - builder.append(skewCol).append('='); - builder.append("[^").append(Path.SEPARATOR_CHAR).append("]*"); - } - builder.append(Path.SEPARATOR_CHAR); - builder.append("[^").append(Path.SEPARATOR_CHAR).append("]*$"); - return Pattern.compile(builder.toString()); - } - - private long[] summary(FileSystem fs, Path path, List skewCols) throws IOException { - long numFiles = 0L; - long tableSize = 0L; - Pattern pattern = toPattern(skewCols); - for (FileStatus status : Utilities.getFileStatusRecurse(path, skewCols.size() + 1, fs)) { - if (status.isDir()) { - continue; - } - String relative = toRelativePath(path, status.getPath()); - if (relative == null) { - continue; - } - if (relative.startsWith(ListBucketingPrunerUtils.HIVE_LIST_BUCKETING_DEFAULT_DIR_NAME) || - pattern.matcher(relative).matches()) { - tableSize += status.getLen(); - numFiles++; - } - } - return new long[] {numFiles, tableSize}; - } - - private String toRelativePath(Path path1, Path path2) { - URI relative = path1.toUri().relativize(path2.toUri()); - if (relative == path2.toUri()) { - return null; - } - return relative.getPath(); - } - private boolean existStats(Map parameters) { return parameters.containsKey(StatsSetupConst.ROW_COUNT) || parameters.containsKey(StatsSetupConst.NUM_FILES) @@ -571,7 +364,7 @@ || parameters.containsKey(StatsSetupConst.NUM_PARTITIONS); } - private void updateStats(List statsList, PartitionStatistics stats, + private void updateStats(List statsList, Statistics stats, StatsAggregator statsAggregator, Map parameters, String aggKey, boolean atomic) throws HiveException { Index: ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java 
=================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorFileSinkOperator.java (working copy) @@ -25,7 +25,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.plan.FileSinkDesc; import org.apache.hadoop.hive.ql.plan.OperatorDesc; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (working copy) @@ -95,6 +95,7 @@ import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hive.common.HiveInterruptCallback; import org.apache.hadoop.hive.common.HiveInterruptUtils; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -1740,7 +1741,7 @@ ArrayList result = new ArrayList(); if (dpCtx != null) { - FileStatus parts[] = getFileStatusRecurse(path, dpCtx.getNumDPCols(), fs); + FileStatus parts[] = HiveStatsUtils.getFileStatusRecurse(path, dpCtx.getNumDPCols(), fs); HashMap taskIDToFile = null; for (int i = 0; i < parts.length; ++i) { @@ -2241,7 +2242,7 @@ Path loadPath = new Path(dpCtx.getRootPath()); FileSystem fs = loadPath.getFileSystem(conf); int numDPCols = dpCtx.getNumDPCols(); - FileStatus[] status = Utilities.getFileStatusRecurse(loadPath, numDPCols, fs); + FileStatus[] status = 
HiveStatsUtils.getFileStatusRecurse(loadPath, numDPCols, fs); if (status.length == 0) { LOG.warn("No partition is generated by dynamic partitioning"); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (working copy) @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; @@ -194,11 +195,11 @@ } } - @Override public int execute(DriverContext driverContext) { try { + // Do any hive related operations like moving tables and files // to appropriate locations LoadFileDesc lfd = work.getLoadFileWork(); @@ -460,7 +461,7 @@ boolean updateBucketCols = false; if (bucketCols != null) { FileSystem fileSys = partn.getPartitionPath().getFileSystem(conf); - FileStatus[] fileStatus = Utilities.getFileStatusRecurse( + FileStatus[] fileStatus = HiveStatsUtils.getFileStatusRecurse( partn.getPartitionPath(), 1, fileSys); // Verify the number of buckets equals the number of files // This will not hold for dynamic partitions where not every reducer produced a file for Index: ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsUtils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsUtils.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsUtils.java (working copy) @@ -23,7 +23,7 @@ import java.util.List; import java.util.Map; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; +import org.apache.hadoop.hive.common.StatsSetupConst; public 
class JDBCStatsUtils { Index: ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java (working copy) @@ -24,6 +24,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.util.ReflectionUtils; Index: ql/src/java/org/apache/hadoop/hive/ql/stats/StatsSetupConst.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsSetupConst.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsSetupConst.java (working copy) @@ -1,64 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.ql.stats; - -/** - * A class that defines the constant strings used by the statistics implementation. 
- */ - -public class StatsSetupConst { - - /** - * The value of the user variable "hive.stats.dbclass" to use HBase implementation. - */ - public static final String HBASE_IMPL_CLASS_VAL = "hbase"; - - /** - * The value of the user variable "hive.stats.dbclass" to use JDBC implementation. - */ - public static final String JDBC_IMPL_CLASS_VAL = "jdbc"; - - /** - * The name of the statistic Num Files to be published or gathered. - */ - public static final String NUM_FILES = "numFiles"; - - /** - * The name of the statistic Num Partitions to be published or gathered. - */ - public static final String NUM_PARTITIONS = "numPartitions"; - - /** - * The name of the statistic Total Size to be published or gathered. - */ - public static final String TOTAL_SIZE = "totalSize"; - - - // statistics stored in metastore - - /** - * The name of the statistic Row Count to be published or gathered. - */ - public static final String ROW_COUNT = "numRows"; - - /** - * The name of the statistic Raw Data Size to be published or gathered. 
- */ - public static final String RAW_DATA_SIZE = "rawDataSize"; - -} Index: ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java (working copy) @@ -27,8 +27,8 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.Explain; @@ -152,7 +152,7 @@ Path dirPath = new Path(dirName); try { FileSystem inpFs = dirPath.getFileSystem(conf); - FileStatus[] status = Utilities.getFileStatusRecurse(dirPath, listBucketingCtx + FileStatus[] status = HiveStatsUtils.getFileStatusRecurse(dirPath, listBucketingCtx .getSkewedColNames().size(), inpFs); List newInputPath = new ArrayList(); boolean succeed = true; Index: ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java (revision 1535192) +++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java (working copy) @@ -24,6 +24,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -33,7 +34,6 @@ import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileValueBufferWrapper; import 
org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.hive.shims.CombineHiveKey; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.MapReduceBase;