Index: hbase-handler/src/test/results/positive/hbase_stats.q.out =================================================================== --- hbase-handler/src/test/results/positive/hbase_stats.q.out (revision 1478217) +++ hbase-handler/src/test/results/positive/hbase_stats.q.out (working copy) @@ -43,7 +43,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 @@ -175,11 +174,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information @@ -345,11 +339,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information Index: hbase-handler/src/test/results/positive/hbase_stats2.q.out =================================================================== --- hbase-handler/src/test/results/positive/hbase_stats2.q.out (revision 1478217) +++ hbase-handler/src/test/results/positive/hbase_stats2.q.out (working copy) @@ -43,7 +43,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 @@ -175,11 +174,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information @@ -345,11 +339,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information Index: hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java =================================================================== --- 
hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java (revision 1478217) +++ hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStatsUtils.java (working copy) @@ -25,7 +25,7 @@ import java.util.Map; import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; +import org.apache.hadoop.hive.common.StatsSetupConst; Index: metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java =================================================================== --- metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (revision 1478217) +++ metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (working copy) @@ -463,7 +463,7 @@ } private static Partition makePartitionObject(String dbName, String tblName, - List ptnVals, Table tbl, String ptnLocationSuffix) { + List ptnVals, Table tbl, String ptnLocationSuffix) throws MetaException { Partition part4 = new Partition(); part4.setDbName(dbName); part4.setTableName(tblName); @@ -472,6 +472,7 @@ part4.setSd(tbl.getSd().deepCopy()); part4.getSd().setSerdeInfo(tbl.getSd().getSerdeInfo().deepCopy()); part4.getSd().setLocation(tbl.getSd().getLocation() + ptnLocationSuffix); + MetaStoreUtils.updatePartitionStatsFast(part4, warehouse); return part4; } Index: metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (revision 1478217) +++ metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java (working copy) @@ -40,18 +40,22 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.JavaUtils; +import 
org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hadoop.hive.metastore.api.InvalidOperationException; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.SerDeInfo; import org.apache.hadoop.hive.metastore.api.StorageDescriptor; import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants; -import org.apache.hadoop.hive.metastore.api.InvalidOperationException; -import org.apache.hadoop.hive.serde.serdeConstants;; +import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.Deserializer; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; @@ -154,6 +158,162 @@ } /** + * @param partParams + * @return True if the passed Parameters Map contains values for all "Fast Stats". + */ + public static boolean containsAllFastStats(Map partParams) { + List fastStats = StatsSetupConst.getFastStats(); + boolean result = true; + for (String stat : fastStats) { + if (!partParams.containsKey(stat)) { + result = false; + break; + } + } + return result; + } + + public static boolean updateUnpartitionedTableStatsFast(Database db, Table tbl, Warehouse wh) + throws MetaException { + return updateUnpartitionedTableStatsFast(db, tbl, wh, false, false); + } + + public static boolean updateUnpartitionedTableStatsFast(Database db, Table tbl, Warehouse wh, + boolean madeDir) throws MetaException { + return updateUnpartitionedTableStatsFast(db, tbl, wh, madeDir, false); + } + + /** + * Updates the numFiles and totalSize parameters for the passed unpartitioned Table by querying + * the warehouse if the passed Table does not already have values for these parameters. 
+ * @param db + * @param tbl + * @param wh + * @param madeDir if true, the directory was just created and can be assumed to be empty + * @param forceRecompute Recompute stats even if the passed Table already has + * these parameters set + * @return true if the stats were updated, false otherwise + */ + public static boolean updateUnpartitionedTableStatsFast(Database db, Table tbl, Warehouse wh, + boolean madeDir, boolean forceRecompute) throws MetaException { + Map params = tbl.getParameters(); + boolean updated = false; + if (forceRecompute || + params == null || + !containsAllFastStats(params)) { + if (params == null) { + params = new HashMap(); + } + if (!madeDir) { + LOG.warn("Updating table stats fast"); + FileStatus[] fileStatus = wh.getFileStatusesForUnpartitionedTable(db, tbl); + params.put(StatsSetupConst.NUM_FILES, Integer.toString(fileStatus.length)); + long tableSize = 0L; + for (int i = 0; i < fileStatus.length; i++) { + tableSize += fileStatus[i].getLen(); + } + params.put(StatsSetupConst.TOTAL_SIZE, Long.toString(tableSize)); + LOG.warn("Updated size to " + Long.toString(tableSize)); + if (params.containsKey(StatsSetupConst.ROW_COUNT) || + params.containsKey(StatsSetupConst.RAW_DATA_SIZE)) { + // TODO: Add a MetaStore flag indicating accuracy of these stats and update it here. 
+ } + } + tbl.setParameters(params); + updated = true; + } + return updated; + } + + private static boolean existFastStats(Map parameters) { + return parameters.containsKey(StatsSetupConst.NUM_FILES) + && parameters.containsKey(StatsSetupConst.TOTAL_SIZE); + } + + public static boolean requireCalStats(Partition oldPart, Partition newPart) { + // requires to calculate stats if new partition doesn't have it + if ((newPart == null) || (newPart.getParameters() == null) + || !existFastStats(newPart.getParameters())) { + return true; + } + + // requires to calculate stats if new and old have different stats + if ((oldPart != null) && (oldPart.getParameters() != null)) { + if (oldPart.getParameters().containsKey(StatsSetupConst.NUM_FILES)) { + long oldNoFile = Long.parseLong(oldPart.getParameters().get(StatsSetupConst.NUM_FILES)); + long newNoFile = Long.parseLong(newPart.getParameters().get(StatsSetupConst.NUM_FILES)); + if (oldNoFile != newNoFile) { + return true; + } + } + + if (oldPart.getParameters().containsKey(StatsSetupConst.TOTAL_SIZE)) { + long oldTotalSize = Long.parseLong(oldPart.getParameters().get(StatsSetupConst.TOTAL_SIZE)); + long newTotalSize = Long.parseLong(newPart.getParameters().get(StatsSetupConst.TOTAL_SIZE)); + if (oldTotalSize != newTotalSize) { + return true; + } + } + } + return false; + } + + public static boolean updatePartitionStatsFast(Partition part, Warehouse wh) + throws MetaException { + return updatePartitionStatsFast(part, wh, false, false); + } + + public static boolean updatePartitionStatsFast(Partition part, Warehouse wh, boolean madeDir) + throws MetaException { + return updatePartitionStatsFast(part, wh, madeDir, false); + } + + /** + * Updates the numFiles and totalSize parameters for the passed Partition by querying + * the warehouse if the passed Partition does not already have values for these parameters. 
+ * @param part + * @param wh + * @param madeDir if true, the directory was just created and can be assumed to be empty + * @param forceRecompute Recompute stats even if the passed Partition already has + * these parameters set + * @return true if the stats were updated, false otherwise + */ + public static boolean updatePartitionStatsFast(Partition part, Warehouse wh, + boolean madeDir, boolean forceRecompute) throws MetaException { + Map params = part.getParameters(); + boolean updated = false; + if (forceRecompute || + params == null || + !containsAllFastStats(params)) { + if (params == null) { + params = new HashMap(); + } + if (!madeDir) { + // The partition location already existed and may contain data. Let's try to + // populate those statistics that don't require a full scan of the data. + LOG.warn("Updating partition stats fast"); + FileStatus[] fileStatus = wh.getFileStatusesForPartition(part); + params.put(StatsSetupConst.NUM_FILES, Integer.toString(fileStatus.length)); + long partSize = 0L; + for (int i = 0; i < fileStatus.length; i++) { + partSize += fileStatus[i].getLen(); + } + params.put(StatsSetupConst.TOTAL_SIZE, Long.toString(partSize)); + LOG.warn("Updated size to " + Long.toString(partSize)); + if (params.containsKey(StatsSetupConst.ROW_COUNT) || + params.containsKey(StatsSetupConst.RAW_DATA_SIZE)) { + // The accuracy of these "collectable" stats at this point is suspect unless we know that + // StatsTask was just run before this MetaStore call and populated them. + // TODO: Add a MetaStore flag indicating accuracy of these stats and update it here. 
@@ -379,7 +539,7 @@ throw new InvalidOperationException( "The following columns have types incompatible with the existing " + "columns in their respective positions :\n" + - StringUtils.join(",", incompatibleCols) + org.apache.commons.lang.StringUtils.join(incompatibleCols, ",") ); } } @@ -1132,6 +1292,13 @@ return filter.toString(); } + public static boolean isView(Table table) { + if (table == null) { + return false; + } + return TableType.VIRTUAL_VIEW.toString().equals(table.getTableType()); + } + /** * create listener instances as per the configuration. * Index: metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (revision 1478217) +++ metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (working copy) @@ -43,11 +43,15 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.metastore.api.Partition; +import org.apache.hadoop.hive.metastore.api.SkewedInfo; +import org.apache.hadoop.hive.metastore.api.Table; import org.apache.hadoop.hive.shims.ShimLoader; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.util.ReflectionUtils; @@ -462,6 +466,66 @@ } /** + * @param partn + * @return array of FileStatus objects corresponding to the files making up the passed partition + */ + public FileStatus[] getFileStatusesForPartition(Partition partn) + throws MetaException { + try { + Path path = new Path(partn.getSd().getLocation()); + FileSystem fileSys = 
path.getFileSystem(conf); + /* consider sub-directory created from list bucketing. */ + int listBucketingDepth = calculateListBucketingDMLDepth(partn); + return HiveStatsUtils.getFileStatusRecurse(path, (1 + listBucketingDepth), fileSys); + } catch (IOException ioe) { + MetaStoreUtils.logAndThrowMetaException(ioe); + } + return null; + } + + /** + * List bucketing will introduce sub-directories. + * + * calculate it here in order to go to the leaf directory + * + * so that we can count right number of files. + * + * @param partn + * @return + */ + public static int calculateListBucketingDMLDepth(Partition partn) { + // list bucketing will introduce more files + int listBucketingDepth = 0; + SkewedInfo skewedInfo = partn.getSd().getSkewedInfo(); + if ((skewedInfo != null) && (skewedInfo.getSkewedColNames() != null) + && (skewedInfo.getSkewedColNames().size() > 0) + && (skewedInfo.getSkewedColValues() != null) + && (skewedInfo.getSkewedColValues().size() > 0) + && (skewedInfo.getSkewedColValueLocationMaps() != null) + && (skewedInfo.getSkewedColValueLocationMaps().size() > 0)) { + listBucketingDepth = skewedInfo.getSkewedColNames().size(); + } + return listBucketingDepth; + } + + /** + * @param table + * @return array of FileStatus objects corresponding to the files making up the passed + * unpartitioned table + */ + public FileStatus[] getFileStatusesForUnpartitionedTable(Database db, Table table) + throws MetaException { + Path tablePath = getTablePath(db, table.getTableName()); + try { + FileSystem fileSys = tablePath.getFileSystem(conf); + return HiveStatsUtils.getFileStatusRecurse(tablePath, 1, fileSys); + } catch (IOException ioe) { + MetaStoreUtils.logAndThrowMetaException(ioe); + } + return null; + } + + /** * Makes a valid partition name. 
* @param partCols The partition columns * @param vals The partition values Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java =================================================================== --- metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (revision 1478217) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (working copy) @@ -1025,7 +1025,8 @@ ms.openTransaction(); - if (ms.getDatabase(tbl.getDbName()) == null) { + Database db = ms.getDatabase(tbl.getDbName()); + if (db == null) { throw new NoSuchObjectException("The database " + tbl.getDbName() + " does not exist"); } @@ -1059,6 +1060,14 @@ madeDir = true; } } + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl)) { + if (tbl.getPartitionKeysSize() == 0) { // Unpartitioned table + MetaStoreUtils.updateUnpartitionedTableStatsFast(db, tbl, wh, madeDir); + } else { // Partitioned table with no partitions. + MetaStoreUtils.updateUnpartitionedTableStatsFast(db, tbl, wh, true); + } + } // set create time long time = System.currentTimeMillis() / 1000; @@ -1536,6 +1545,11 @@ part.setCreateTime((int) time); part.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(time)); + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl)) { + MetaStoreUtils.updatePartitionStatsFast(part, wh, madeDir); + } + success = ms.addPartition(part); if (success) { success = ms.commitTransaction(); @@ -1756,6 +1770,11 @@ } } + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl)) { + MetaStoreUtils.updatePartitionStatsFast(part, wh, madeDir); + } + // set create time long time = System.currentTimeMillis() / 1000; part.setCreateTime((int) time); Index: metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java =================================================================== --- 
metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (revision 1478217) +++ metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (working copy) @@ -20,8 +20,10 @@ import java.io.IOException; import java.net.URI; import java.util.ArrayList; +import java.util.HashMap; import java.util.Iterator; import java.util.List; +import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; @@ -29,8 +31,10 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.AlreadyExistsException; +import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.metastore.api.InvalidObjectException; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; @@ -58,6 +62,32 @@ hiveConf = conf; } + /** + * Copies the stats values available in params1 to params2 so that params2 ends up with exactly + * the same stats as params1. 
+ * @param params1 + * @param params2 + * @return the updated Map for params2 + */ + public static Map copyStatsParameters( + Map params1, Map params2) { + if (params1 == null) { + params1 = new HashMap(); + } + if (params2 == null) { + params2 = new HashMap(); + } + List allStats = StatsSetupConst.getSupportedStats(); + for (String statType : allStats) { + if (params1.containsKey(statType)) { + params2.put(statType, params1.get(statType)); + } else { + params2.remove(statType); + } + } + return params2; + } + public void alterTable(RawStore msdb, Warehouse wh, String dbname, String name, Table newt) throws InvalidOperationException, MetaException { if (newt == null) { @@ -192,6 +222,10 @@ msdb.alterPartition(dbname, name, part.getValues(), part); } } + } else if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(newt) && (newt.getPartitionKeysSize() == 0)) { + Database db = msdb.getDatabase(newt.getDbName()); + MetaStoreUtils.updateUnpartitionedTableStatsFast(db, newt, wh, false, true); } // now finally call alter table msdb.alterTable(dbname, name, newt); @@ -251,10 +285,10 @@ Path destPath = null; FileSystem srcFs = null; FileSystem destFs = null; - Table tbl = null; Partition oldPart = null; String oldPartLoc = null; String newPartLoc = null; + // Set DDL time to now if not specified if (new_part.getParameters() == null || new_part.getParameters().get(hive_metastoreConstants.DDL_TIME) == null || @@ -262,10 +296,16 @@ new_part.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(System .currentTimeMillis() / 1000)); } + + Table tbl = msdb.getTable(dbname, name); //alter partition if (part_vals == null || part_vals.size() == 0) { try { oldPart = msdb.getPartition(dbname, name, new_part.getValues()); + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl) && MetaStoreUtils.requireCalStats(oldPart, new_part)) { + 
MetaStoreUtils.updatePartitionStatsFast(new_part, wh, false, true); + } msdb.alterPartition(dbname, name, new_part.getValues(), new_part); } catch (InvalidObjectException e) { throw new InvalidOperationException("alter is not possible"); @@ -296,7 +336,6 @@ throw new AlreadyExistsException("Partition already exists:" + dbname + "." + name + "." + new_part.getValues()); } - tbl = msdb.getTable(dbname, name); if (tbl == null) { throw new InvalidObjectException( "Unable to rename partition because table or database do not exist"); @@ -348,6 +387,10 @@ + tbl.getTableName() + " " + new_part.getValues()); } new_part.getSd().setLocation(newPartLoc); + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl) && MetaStoreUtils.requireCalStats(oldPart, new_part)) { + MetaStoreUtils.updatePartitionStatsFast(new_part, wh, false, true); + } msdb.alterPartition(dbname, name, part_vals, new_part); } } @@ -396,6 +439,7 @@ MetaException { List oldParts = new ArrayList(); List> partValsList = new ArrayList>(); + Table tbl = msdb.getTable(dbname, name); try { for (Partition tmpPart: new_parts) { // Set DDL time to now if not specified @@ -405,9 +449,15 @@ tmpPart.putToParameters(hive_metastoreConstants.DDL_TIME, Long.toString(System .currentTimeMillis() / 1000)); } + Partition oldTmpPart = msdb.getPartition(dbname, name, tmpPart.getValues()); oldParts.add(oldTmpPart); partValsList.add(tmpPart.getValues()); + + if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVESTATSAUTOGATHER) && + !MetaStoreUtils.isView(tbl) && MetaStoreUtils.requireCalStats(oldTmpPart, tmpPart)) { + MetaStoreUtils.updatePartitionStatsFast(tmpPart, wh, false, true); + } } msdb.alterPartitions(dbname, name, partValsList, new_parts); } catch (InvalidObjectException e) { Index: common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java =================================================================== --- 
common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java (revision 0) +++ common/src/java/org/apache/hadoop/hive/common/StatsSetupConst.java (revision 0) @@ -0,0 +1,111 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + + +/** + * A class that defines the constant strings used by the statistics implementation. + */ + +public class StatsSetupConst { + + /** + * The value of the user variable "hive.stats.dbclass" to use HBase implementation. + */ + public static final String HBASE_IMPL_CLASS_VAL = "hbase"; + + /** + * The value of the user variable "hive.stats.dbclass" to use JDBC implementation. + */ + public static final String JDBC_IMPL_CLASS_VAL = "jdbc"; + + /** + * The name of the statistic Num Files to be published or gathered. + */ + public static final String NUM_FILES = "numFiles"; + + /** + * The name of the statistic Num Partitions to be published or gathered. + */ + public static final String NUM_PARTITIONS = "numPartitions"; + + /** + * The name of the statistic Total Size to be published or gathered. 
+ */ + public static final String TOTAL_SIZE = "totalSize"; + + + // statistics stored in metastore + + /** + * The name of the statistic Row Count to be published or gathered. + */ + public static final String ROW_COUNT = "numRows"; + + /** + * The name of the statistic Raw Data Size to be published or gathered. + */ + public static final String RAW_DATA_SIZE = "rawDataSize"; + + /** + * @return List of all supported statistics + */ + public static List getSupportedStats() { + List supportedStats = new ArrayList(); + supportedStats.add(NUM_FILES); + supportedStats.add(ROW_COUNT); + supportedStats.add(TOTAL_SIZE); + supportedStats.add(RAW_DATA_SIZE); + return supportedStats; + } + + /** + * @return List of all statistics that need to be collected during query execution. These are + * statistics that inherently require a scan of the data. + */ + public static List getCollectableStats() { + List collectableStats = new ArrayList(); + collectableStats.add(ROW_COUNT); + collectableStats.add(RAW_DATA_SIZE); + return collectableStats; + } + + /** + * @return List of statistics that can be collected quickly without requiring a scan of the data. 
+ */ + public static List getFastStats() { + List fastStats = new ArrayList(); + fastStats.add(NUM_FILES); + fastStats.add(TOTAL_SIZE); + return fastStats; + } + + public static Map getNameMapping() { + Map nameMapping = new HashMap(); + nameMapping.put(NUM_FILES, "num_files"); + nameMapping.put(ROW_COUNT, "num_rows"); + nameMapping.put(TOTAL_SIZE, "total_size"); + nameMapping.put(RAW_DATA_SIZE, "raw_data_size"); + return nameMapping; + } +} Index: common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java =================================================================== --- common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java (revision 0) +++ common/src/java/org/apache/hadoop/hive/common/HiveStatsUtils.java (revision 0) @@ -0,0 +1,58 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.common; + +import java.io.IOException; + +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +/** + * HiveStatsUtils. + * A collection of utilities used for hive statistics. 
+ * Used by classes in both metastore and ql package + */ + +public class HiveStatsUtils { + + /** + * Get all file status from a root path and recursively go deep into certain levels. + * + * @param path + * the root path + * @param level + * the depth of directory should explore + * @param fs + * the file system + * @return array of FileStatus + * @throws IOException + */ + public static FileStatus[] getFileStatusRecurse(Path path, int level, FileSystem fs) + throws IOException { + + // construct a path pattern (e.g., /*/*) to find all dynamically generated paths + StringBuilder sb = new StringBuilder(path.toUri().getPath()); + for (int i = 0; i < level; ++i) { + sb.append(Path.SEPARATOR).append("*"); + } + Path pathPattern = new Path(path, sb.toString()); + return fs.globStatus(pathPattern); + } + +} Index: ql/src/test/results/clientnegative/unset_table_property.q.out =================================================================== --- ql/src/test/results/clientnegative/unset_table_property.q.out (revision 1478217) +++ ql/src/test/results/clientnegative/unset_table_property.q.out (working copy) @@ -16,11 +16,13 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 FAILED: SemanticException [Error 10215]: Please use the following syntax if not sure whether the property existed or not: ALTER TABLE tableName UNSET TBLPROPERTIES IF EXISTS (key1, key2, ...) 
The following property z does not exist in testtable Index: ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out =================================================================== --- ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out (revision 1478217) +++ ql/src/test/results/clientnegative/stats_partialscan_autogether.q.out (working copy) @@ -71,6 +71,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5293 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/merge4.q.out =================================================================== --- ql/src/test/results/clientpositive/merge4.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/merge4.q.out (working copy) @@ -2987,14 +2987,14 @@ POSTHOOK: Output: default@nzhang_part@ds=2010-08-15/hr=file, POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part 
PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: show partitions nzhang_part @@ -3003,14 +3003,14 @@ POSTHOOK: type: SHOWPARTITIONS POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, 
type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] ds=2010-08-15/hr=11 @@ -3028,14 +3028,14 @@ #### A masked pattern was here #### POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE 
[(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=11).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).key SIMPLE [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=12).value SIMPLE [(srcpart)srcpart.FieldSchema(name:value, 
type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).key EXPRESSION [(srcpart)srcpart.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: nzhang_part PARTITION(ds=2010-08-15,hr=file,).value EXPRESSION [(srcpart)srcpart.FieldSchema(name:value, type:string, comment:default), ] 1 1 2010-08-15 file, Index: ql/src/test/results/clientpositive/stats20.q.out =================================================================== --- ql/src/test/results/clientpositive/stats20.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats20.q.out (working copy) @@ -41,11 +41,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -100,11 +95,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 0 - totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/infer_bucket_sort.q.out =================================================================== --- ql/src/test/results/clientpositive/infer_bucket_sort.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/infer_bucket_sort.q.out (working copy) @@ -135,20 +135,20 @@ POSTHOOK: Output: default@test_table@part=1 POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') POSTHOOK: type: DESCTABLE POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -199,10 +199,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION 
[(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -211,10 +211,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -267,10 +267,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -281,10 +281,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -337,12 +337,12 @@ POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') PREHOOK: type: DESCTABLE POSTHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -353,12 +353,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, 
type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] # col_name data_type comment key string None @@ -411,12 +411,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, 
type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -429,12 +429,12 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] # col_name data_type comment @@ -489,13 +489,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -509,13 +509,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table 
PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -573,13 +573,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -595,13 +595,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -661,13 +661,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value 
EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -685,13 +685,13 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, 
type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -751,6 +751,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -758,8 +760,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -777,6 +777,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -784,8 +786,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [(src)c.FieldSchema(name:value, type:string, comment:default), ] @@ -845,6 +845,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -852,8 +854,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ 
-873,6 +873,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -880,8 +882,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -943,6 +943,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: 
test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -952,8 +954,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -973,6 +973,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -982,8 +984,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1045,6 +1045,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, 
comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1056,8 +1058,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1077,6 +1077,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1088,8 +1090,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)b.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1151,6 +1151,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table 
PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1164,8 +1166,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1185,6 +1185,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, 
type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1198,8 +1200,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1261,6 +1261,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, 
comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1276,8 +1278,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1297,6 +1297,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ 
-1312,8 +1314,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1375,6 +1375,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1392,8 +1394,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1413,6 +1413,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1430,8 +1432,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1493,6 +1493,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1512,8 +1514,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION 
[(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1533,6 +1533,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1552,8 +1554,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1615,6 +1615,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1636,8 +1638,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1657,6 +1657,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1678,8 +1680,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE 
[(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1743,6 +1743,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1764,8 +1766,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1787,6 +1787,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: 
Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1808,8 +1810,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1875,6 +1875,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1896,8 +1898,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -1921,6 +1921,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE 
[(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -1942,8 +1944,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SCRIPT [] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2009,6 +2009,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: 
Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2032,8 +2034,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2057,6 +2057,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2080,8 +2082,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2147,6 +2147,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2172,8 +2174,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2197,6 +2197,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2222,8 +2224,6 @@ POSTHOOK: Lineage: test_table 
PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2291,6 +2291,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2316,8 +2318,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, 
type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2343,6 +2343,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SCRIPT [(src)src.FieldSchema(name:key, type:string, comment:default), (src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2368,8 +2370,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table 
PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2439,6 +2439,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2464,8 +2466,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: 
test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -2493,6 +2493,8 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)a.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)a.FieldSchema(name:key, type:string, comment:default), ] @@ -2518,8 +2520,6 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, 
type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/groupby_ppr.q.out (working copy) @@ -88,15 +88,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -134,15 +129,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats8.q.out =================================================================== --- 
ql/src/test/results/clientpositive/stats8.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats8.q.out (working copy) @@ -159,11 +159,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -752,11 +747,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 4 - numPartitions 4 - numRows 2000 - rawDataSize 21248 - totalSize 23248 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/create_like_view.q.out =================================================================== --- ql/src/test/results/clientpositive/create_like_view.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/create_like_view.q.out (working copy) @@ -136,6 +136,8 @@ Table Type: EXTERNAL_TABLE Table Parameters: EXTERNAL TRUE + numFiles 0 + totalSize 0 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/input_part7.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part7.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/input_part7.q.out (working copy) @@ -168,15 +168,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -214,15 +209,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr 
- rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketmapjoin5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin5.q.out (working copy) @@ -264,15 +264,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -311,15 +306,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -761,7 +751,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -814,15 +803,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - 
numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -861,15 +845,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6124 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -902,7 +881,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -937,7 +915,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -966,7 +943,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -985,7 +961,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1018,7 +993,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - 
numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1047,7 +1021,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1066,7 +1039,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 928 rawDataSize 17038 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/pcr.q.out =================================================================== --- ql/src/test/results/clientpositive/pcr.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/pcr.q.out (working copy) @@ -141,15 +141,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -186,15 +181,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -339,15 +329,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - 
rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -384,15 +369,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -429,15 +409,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -626,15 +601,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -671,15 +641,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -832,15 +797,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -877,15 +837,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1040,15 +995,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1085,15 +1035,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1130,15 +1075,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1304,15 +1244,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1349,15 +1284,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1394,15 +1324,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1572,15 +1497,10 @@ columns.types int:string #### A 
masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1617,15 +1537,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1757,15 +1672,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1802,15 +1712,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -1982,15 +1887,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 
480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2027,15 +1927,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2072,15 +1967,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2286,15 +2176,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2331,15 +2216,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2489,15 +2369,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2766,15 +2641,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -2811,15 +2681,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 3 - numPartitions 3 - numRows 60 partition_columns ds - rawDataSize 480 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 540 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3103,15 +2968,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3148,15 +3008,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3193,15 +3048,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3238,15 +3088,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3441,15 +3286,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3486,15 +3326,10 @@ columns.types int:string #### A 
masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3531,15 +3366,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -3806,15 +3636,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -4275,7 +4100,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4316,7 +4140,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4365,15 +4188,10 @@ columns.types int:string #### A masked pattern was here #### name default.pcr_t1 - numFiles 4 - numPartitions 4 - numRows 80 partition_columns ds - rawDataSize 640 serialization.ddl struct pcr_t1 { i32 key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 720 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.pcr_t1 @@ -4405,7 +4223,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4440,7 +4257,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4469,7 +4285,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4488,7 +4303,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4521,7 +4335,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4550,7 +4363,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4569,7 +4381,6 @@ #### A masked pattern was here #### name default.pcr_t2 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t2 { i32 key, string value} @@ -4613,7 +4424,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4648,7 +4458,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4677,7 +4486,6 @@ #### A masked pattern was here #### name default.pcr_t3 
numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4696,7 +4504,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4729,7 +4536,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4758,7 +4564,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4777,7 +4582,6 @@ #### A masked pattern was here #### name default.pcr_t3 numFiles 1 - numPartitions 0 numRows 20 rawDataSize 160 serialization.ddl struct pcr_t3 { i32 key, string value} @@ -4920,15 +4724,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5092,15 +4891,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5138,15 +4932,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 
partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5316,15 +5105,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -5362,15 +5146,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/show_tblproperties.q.out =================================================================== --- ql/src/test/results/clientpositive/show_tblproperties.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/show_tblproperties.q.out (working copy) @@ -29,10 +29,12 @@ POSTHOOK: query: show tblproperties tmpfoo POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### tmp true #### A masked pattern was here #### bar bar value +totalSize 0 PREHOOK: query: show tblproperties tmpfoo("bar") PREHOOK: type: SHOW_TBLPROPERTIES POSTHOOK: query: show tblproperties tmpfoo("bar") Index: ql/src/test/results/clientpositive/stats3.q.out 
=================================================================== --- ql/src/test/results/clientpositive/stats3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats3.q.out (working copy) @@ -79,7 +79,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 11 @@ -233,11 +232,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 6 - rawDataSize 6 - totalSize 171 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/join33.q.out =================================================================== --- ql/src/test/results/clientpositive/join33.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join33.q.out (working copy) @@ -148,7 +148,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -167,7 +166,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -190,7 +188,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -209,7 +206,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} Index: ql/src/test/results/clientpositive/input_part2.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/input_part2.q.out (working copy) @@ -163,15 +163,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 
partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -209,15 +204,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/alter_partition_coltype.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_partition_coltype.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/alter_partition_coltype.q.out (working copy) @@ -163,15 +163,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 2 - numPartitions 2 - numRows 50 partition_columns dt/ts - rawDataSize 382 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 432 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -351,15 +346,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked 
pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -506,15 +496,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -552,15 +537,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -598,15 +578,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -837,15 +812,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was 
here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -883,15 +853,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -929,15 +894,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -1071,15 +1031,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -1117,15 +1072,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype @@ -1163,15 +1113,10 @@ columns.types string:string #### A masked pattern was here #### name default.alter_coltype - numFiles 3 - numPartitions 3 - numRows 75 partition_columns dt/ts - rawDataSize 573 serialization.ddl struct alter_coltype { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 648 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.alter_coltype Index: ql/src/test/results/clientpositive/stats_noscan_1.q.out =================================================================== --- ql/src/test/results/clientpositive/stats_noscan_1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats_noscan_1.q.out (working copy) @@ -228,6 +228,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -271,6 +273,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -314,11 +318,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 0 - rawDataSize 0 - totalSize 11624 #### A masked pattern was here #### # Storage Information @@ -613,6 +612,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -664,6 +665,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out =================================================================== --- 
ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_4.q.out (working copy) @@ -157,15 +157,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -306,15 +301,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -486,15 +476,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -533,15 +518,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -580,15 +560,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -718,15 +693,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -765,15 +735,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -812,15 +777,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -928,15 +888,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/load_dyn_part8.q.out =================================================================== --- ql/src/test/results/clientpositive/load_dyn_part8.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/load_dyn_part8.q.out (working copy) @@ -173,15 +173,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -219,15 +214,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -265,15 +255,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 
serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -311,15 +296,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/sample9.q.out =================================================================== --- ql/src/test/results/clientpositive/sample9.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample9.q.out (working copy) @@ -67,7 +67,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -87,7 +86,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/describe_table.q.out =================================================================== --- ql/src/test/results/clientpositive/describe_table.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/describe_table.q.out (working copy) @@ -91,11 +91,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 4 - numPartitions 4 - numRows 0 - rawDataSize 0 - totalSize 23248 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/groupby_map_ppr.q.out 
=================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/groupby_map_ppr.q.out (working copy) @@ -105,15 +105,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -151,15 +146,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/groupby_sort_6.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_6.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/groupby_sort_6.q.out (working copy) @@ -259,7 +259,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -288,7 +287,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -425,15 +423,10 @@ columns.types string:string #### A masked pattern was here #### name default.t1 - numFiles 1 - numPartitions 1 - numRows 0 
partition_columns ds - rawDataSize 0 serialization.ddl struct t1 { string key, string val} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1 @@ -471,7 +464,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -502,7 +494,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} Index: ql/src/test/results/clientpositive/sample4.q.out =================================================================== --- ql/src/test/results/clientpositive/sample4.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample4.q.out (working copy) @@ -89,7 +89,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -109,7 +108,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/bucketcontext_7.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_7.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketcontext_7.q.out (working copy) @@ -200,15 +200,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked 
pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -248,15 +243,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -400,15 +390,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -448,15 +433,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/groupby_sort_1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/groupby_sort_1.q.out (working copy) @@ -130,7 +130,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string 
key, string val} @@ -151,7 +150,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -461,7 +459,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -482,7 +479,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -681,7 +677,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -712,7 +707,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -733,7 +727,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -771,7 +764,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -806,7 +798,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -835,7 +826,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -854,7 +844,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -887,7 +876,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 
15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -916,7 +904,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -935,7 +922,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1074,7 +1060,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1105,7 +1090,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1126,7 +1110,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1164,7 +1147,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1199,7 +1181,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1228,7 +1209,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1247,7 +1227,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1280,7 +1259,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1309,7 +1287,6 @@ #### A masked pattern 
was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1328,7 +1305,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1521,7 +1497,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1542,7 +1517,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1898,7 +1872,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1919,7 +1892,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2163,7 +2135,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2184,7 +2155,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2232,7 +2202,6 @@ #### A masked pattern was here #### name default.outputtbl3 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 25 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} @@ -2263,7 +2232,6 @@ #### A masked pattern was here #### name default.outputtbl3 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 25 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} @@ -2462,7 +2430,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { 
string key, string val} @@ -2483,7 +2450,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2527,7 +2493,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2558,7 +2523,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2755,7 +2719,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2817,7 +2780,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2848,7 +2810,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2869,7 +2830,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2907,7 +2867,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2942,7 +2901,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2971,7 +2929,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2990,7 +2947,6 @@ #### A masked pattern was here #### name default.outputtbl1 
numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3023,7 +2979,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3052,7 +3007,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3071,7 +3025,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3287,7 +3240,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3308,7 +3260,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3384,7 +3335,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3446,7 +3396,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3493,7 +3442,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3514,7 +3462,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3553,7 +3500,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ 
-3588,7 +3534,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3617,7 +3562,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3636,7 +3580,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3669,7 +3612,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3698,7 +3640,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3717,7 +3658,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3979,7 +3919,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4000,7 +3939,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4043,7 +3981,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 32 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4074,7 +4011,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 32 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4293,7 +4229,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - 
numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4314,7 +4249,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4457,7 +4391,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4478,7 +4411,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4715,7 +4647,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -4736,7 +4667,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -4780,7 +4710,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4811,7 +4740,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -5030,7 +4958,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5061,7 +4988,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5082,7 +5008,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5120,7 +5045,6 @@ #### A 
masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5155,7 +5079,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5184,7 +5107,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5203,7 +5125,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5236,7 +5157,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5265,7 +5185,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5284,7 +5203,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5592,7 +5510,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5613,7 +5530,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6037,7 +5953,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 
serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6068,7 +5983,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6089,7 +6003,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6127,7 +6040,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6162,7 +6074,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6191,7 +6102,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6210,7 +6120,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6243,7 +6152,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6272,7 +6180,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6291,7 +6198,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6568,7 +6474,6 @@ #### A masked pattern 
was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6599,7 +6504,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6620,7 +6524,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6658,7 +6561,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6693,7 +6595,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6722,7 +6623,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6741,7 +6641,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6774,7 +6673,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6803,7 +6701,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6822,7 +6719,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct 
outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} Index: ql/src/test/results/clientpositive/stats13.q.out =================================================================== --- ql/src/test/results/clientpositive/stats13.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats13.q.out (working copy) @@ -78,10 +78,12 @@ columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart + numFiles 1 partition_columns ds/hr serialization.ddl struct analyze_srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -160,11 +162,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -255,6 +252,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -298,6 +297,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -341,6 +342,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/udf_reflect2.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_reflect2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/udf_reflect2.q.out (working copy) @@ -201,7 +201,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} 
@@ -220,7 +219,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_11.q.out (working copy) @@ -190,15 +190,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -237,15 +232,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -284,15 +274,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -508,15 +493,10 @@ columns.types string:string #### 
A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -555,15 +535,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -602,15 +577,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -818,15 +788,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -865,15 +830,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - 
numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/combine2_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/combine2_hadoop20.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/combine2_hadoop20.q.out (working copy) @@ -268,15 +268,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -313,15 +308,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -358,15 +348,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -403,15 +388,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -448,15 +428,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -493,15 +468,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -538,15 +508,10 @@ columns.types string #### A masked pattern was here #### name default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 @@ -583,15 +548,10 @@ columns.types string #### A masked pattern was here #### name 
default.combine2 - numFiles 8 - numPartitions 8 - numRows 12 partition_columns value - rawDataSize 14 serialization.ddl struct combine2 { string key} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.combine2 Index: ql/src/test/results/clientpositive/rand_partitionpruner1.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/rand_partitionpruner1.q.out (working copy) @@ -64,7 +64,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -83,7 +82,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/show_create_table_alter.q.out =================================================================== --- ql/src/test/results/clientpositive/show_create_table_alter.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/show_create_table_alter.q.out (working copy) @@ -68,7 +68,9 @@ #### A masked pattern was here #### TBLPROPERTIES ( 'EXTERNAL'='FALSE', + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: -- Alter the table comment, change the EXTERNAL property back and test SHOW CREATE TABLE on the change. 
ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -103,7 +105,9 @@ LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: -- Change the 'SORTBUCKETCOLSPREFIX' property and test SHOW CREATE TABLE. The output should not change. ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -138,7 +142,9 @@ LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: -- Alter the storage handler of the table, and test SHOW CREATE TABLE. ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -173,7 +179,9 @@ LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@tmp_showcrt1 Index: ql/src/test/results/clientpositive/bucketcontext_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketcontext_2.q.out (working copy) @@ -175,15 +175,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -223,15 +218,10 @@ columns.types string:string #### A 
masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -373,15 +363,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -421,15 +406,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucket2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucket2.q.out (working copy) @@ -61,7 +61,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -80,7 +79,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string 
key, string value} Index: ql/src/test/results/clientpositive/infer_bucket_sort_multi_insert.q.out =================================================================== --- ql/src/test/results/clientpositive/infer_bucket_sort_multi_insert.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/infer_bucket_sort_multi_insert.q.out (working copy) @@ -259,10 +259,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] PREHOOK: query: DESCRIBE FORMATTED test_table PARTITION (part = '1') @@ -275,10 +275,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION 
[(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] # col_name data_type comment @@ -325,10 +325,10 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] -POSTHOOK: Lineage: test_table 
PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] # col_name data_type comment @@ -391,11 +391,11 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -411,11 +411,11 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: 
test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] @@ -465,11 +465,11 @@ POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=1).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=1).value EXPRESSION [(src)src.null, ] -POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] -POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: 
test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table PARTITION(part=2).key SIMPLE [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).value EXPRESSION [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table PARTITION(part=2).key EXPRESSION [(src)src.null, ] POSTHOOK: Lineage: test_table PARTITION(part=2).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] Index: ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/groupby_map_ppr_multi_distinct.q.out (working copy) @@ -115,15 +115,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -161,15 +156,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/filter_join_breaktask.q.out =================================================================== --- ql/src/test/results/clientpositive/filter_join_breaktask.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/filter_join_breaktask.q.out (working copy) @@ -117,15 +117,10 @@ columns.types int:string #### A masked pattern was here #### name default.filter_join_breaktask - numFiles 1 - numPartitions 1 - numRows 25 partition_columns ds - rawDataSize 211 serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 236 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.filter_join_breaktask @@ -246,15 +241,10 @@ columns.types int:string #### A masked pattern was here #### name default.filter_join_breaktask - numFiles 1 - numPartitions 1 - numRows 25 partition_columns ds - rawDataSize 211 serialization.ddl struct filter_join_breaktask { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 236 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.filter_join_breaktask Index: ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out =================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sort_merge_join_desc_5.q.out (working copy) @@ -144,15 +144,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - 
numFiles 1 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/join17.q.out =================================================================== --- ql/src/test/results/clientpositive/join17.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join17.q.out (working copy) @@ -75,7 +75,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -94,7 +93,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/input_part9.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part9.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/input_part9.q.out (working copy) @@ -89,15 +89,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -135,15 +130,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart 
{ string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketmapjoin7.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin7.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin7.q.out (working copy) @@ -175,15 +175,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/alter_table_serde2.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_table_serde2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/alter_table_serde2.q.out (working copy) @@ -132,12 +132,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/bucketmapjoin11.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin11.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin11.q.out (working copy) @@ -229,15 +229,10 @@ columns.types int:string 
#### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -276,15 +271,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -469,15 +459,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -516,15 +501,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 6 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 8562 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/join26.q.out =================================================================== --- ql/src/test/results/clientpositive/join26.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join26.q.out (working copy) @@ -165,15 +165,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative.q.out (working copy) @@ -163,7 +163,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -183,7 +182,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} Index: ql/src/test/results/clientpositive/stats5.q.out =================================================================== --- ql/src/test/results/clientpositive/stats5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats5.q.out (working copy) @@ -54,7 +54,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 Index: ql/src/test/results/clientpositive/ppd_join_filter.q.out 
=================================================================== --- ql/src/test/results/clientpositive/ppd_join_filter.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/ppd_join_filter.q.out (working copy) @@ -82,7 +82,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -101,7 +100,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -222,7 +220,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -241,7 +238,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -414,7 +410,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -433,7 +428,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -554,7 +548,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -573,7 +566,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -746,7 +738,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -765,7 +756,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 
serialization.ddl struct src { string key, string value} @@ -886,7 +876,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -905,7 +894,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1078,7 +1066,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1097,7 +1084,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1218,7 +1204,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1237,7 +1222,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/join35.q.out =================================================================== --- ql/src/test/results/clientpositive/join35.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join35.q.out (working copy) @@ -97,7 +97,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -116,7 +115,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -337,7 +335,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -356,7 +353,6 @@ #### A 
masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -535,7 +531,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -554,7 +549,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -669,7 +663,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -688,7 +681,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -799,7 +791,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -818,7 +809,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucketmapjoin2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin2.q.out (working copy) @@ -209,15 +209,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -658,7 +653,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -711,15 +705,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -751,7 +740,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -786,7 +774,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -815,7 +802,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -834,7 +820,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -867,7 +852,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 
serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -896,7 +880,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -915,7 +898,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1354,7 +1336,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1407,15 +1388,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -1447,7 +1423,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1482,7 +1457,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1511,7 +1485,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 
10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1530,7 +1503,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1563,7 +1535,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1592,7 +1563,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1611,7 +1581,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/alter_partition_clusterby_sortby.q.out (working copy) @@ -46,6 +46,9 @@ #### A masked pattern was here #### Partition Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -93,6 +96,9 @@ #### A masked pattern was here #### Partition Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -140,6 +146,9 @@ #### A masked 
pattern was here #### Partition Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/join_map_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/join_map_ppr.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join_map_ppr.q.out (working copy) @@ -167,15 +167,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -668,7 +663,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -720,15 +714,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -760,7 +749,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -795,7 +783,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 
107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -824,7 +811,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -843,7 +829,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -876,7 +861,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -905,7 +889,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -924,7 +907,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 107 rawDataSize 2018 serialization.ddl struct dest_j1 { string key, string value, string val2} Index: ql/src/test/results/clientpositive/stats0.q.out =================================================================== --- ql/src/test/results/clientpositive/stats0.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats0.q.out (working copy) @@ -74,7 +74,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -93,7 +92,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1416,7 +1414,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1435,7 +1432,6 @@ #### A masked pattern was 
here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/join9.q.out =================================================================== --- ql/src/test/results/clientpositive/join9.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join9.q.out (working copy) @@ -71,7 +71,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -90,7 +89,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -134,15 +132,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/smb_mapjoin_11.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_11.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/smb_mapjoin_11.q.out (working copy) @@ -156,15 +156,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table1 - numFiles 16 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.test_table1 Index: ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out =================================================================== --- ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/ppr_allchildsarenull.q.out (working copy) @@ -97,15 +97,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -143,15 +138,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -288,15 +278,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -334,15 +319,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string 
value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -380,15 +360,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -426,15 +401,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_1.q.out (working copy) @@ -161,15 +161,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -209,15 +204,10 @@ 
columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -359,15 +349,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -407,15 +392,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -588,15 +568,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -636,15 +611,10 @@ columns.types string:string #### A masked pattern was here #### 
name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -683,15 +653,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -822,15 +787,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -870,15 +830,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -917,15 +872,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - 
numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1032,15 +982,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1080,15 +1025,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/sample6.q.out =================================================================== --- ql/src/test/results/clientpositive/sample6.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample6.q.out (working copy) @@ -87,7 +87,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -107,7 +106,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -631,7 +629,6 @@ #### A masked pattern 
was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -651,7 +648,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -1005,7 +1001,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -1025,7 +1020,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -1633,7 +1627,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -1653,7 +1646,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -2104,7 +2096,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -2124,7 +2115,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -2561,7 +2551,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2581,7 +2570,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2605,7 +2593,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - 
numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2625,7 +2612,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2863,7 +2849,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} @@ -2883,7 +2868,6 @@ #### A masked pattern was here #### name default.srcbucket2 numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket2 { i32 key, string value} Index: ql/src/test/results/clientpositive/join_filters_overlap.q.out =================================================================== --- ql/src/test/results/clientpositive/join_filters_overlap.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join_filters_overlap.q.out (working copy) @@ -106,7 +106,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -125,7 +124,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -312,7 +310,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -331,7 +328,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -518,7 +514,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -537,7 +532,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 
serialization.ddl struct a { i32 key, i32 value} @@ -742,7 +736,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -761,7 +754,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -970,7 +962,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} @@ -989,7 +980,6 @@ #### A masked pattern was here #### name default.a numFiles 1 - numPartitions 0 numRows 3 rawDataSize 18 serialization.ddl struct a { i32 key, i32 value} Index: ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out =================================================================== --- ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/create_alter_list_bucketing_table1.q.out (working copy) @@ -75,6 +75,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -117,6 +120,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -163,6 +169,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -205,6 +214,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was 
here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -246,6 +258,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/bucket_map_join_1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_map_join_1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucket_map_join_1.q.out (working copy) @@ -127,7 +127,6 @@ #### A masked pattern was here #### name default.table1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct table1 { string key, string value} @@ -148,7 +147,6 @@ #### A masked pattern was here #### name default.table1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct table1 { string key, string value} Index: ql/src/test/results/clientpositive/sample1.q.out =================================================================== --- ql/src/test/results/clientpositive/sample1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample1.q.out (working copy) @@ -113,15 +113,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats15.q.out =================================================================== --- ql/src/test/results/clientpositive/stats15.q.out (revision 1478217) +++ 
ql/src/test/results/clientpositive/stats15.q.out (working copy) @@ -43,7 +43,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 @@ -175,11 +174,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information @@ -345,11 +339,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/stats_partscan_1.q.out =================================================================== --- ql/src/test/results/clientpositive/stats_partscan_1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats_partscan_1.q.out (working copy) @@ -71,6 +71,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5293 #### A masked pattern was here #### # Storage Information @@ -214,6 +216,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5293 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/reduce_deduplicate.q.out =================================================================== --- ql/src/test/results/clientpositive/reduce_deduplicate.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/reduce_deduplicate.q.out (working copy) @@ -64,7 +64,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -83,7 +82,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string 
value} Index: ql/src/test/results/clientpositive/rand_partitionpruner3.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/rand_partitionpruner3.q.out (working copy) @@ -91,15 +91,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -218,15 +213,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_4.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketcontext_4.q.out (working copy) @@ -187,15 +187,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -336,15 +331,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/stats10.q.out =================================================================== --- ql/src/test/results/clientpositive/stats10.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats10.q.out (working copy) @@ -550,11 +550,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 4 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/bucket4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket4.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucket4.q.out (working copy) @@ -64,7 +64,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -83,7 +82,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/merge3.q.out =================================================================== --- ql/src/test/results/clientpositive/merge3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/merge3.q.out (working copy) @@ -116,7 +116,6 @@ #### A masked pattern was 
here #### name default.merge_src numFiles 4 - numPartitions 0 numRows 2000 rawDataSize 21248 serialization.ddl struct merge_src { string key, string value} @@ -135,7 +134,6 @@ #### A masked pattern was here #### name default.merge_src numFiles 4 - numPartitions 0 numRows 2000 rawDataSize 21248 serialization.ddl struct merge_src { string key, string value} @@ -2329,7 +2327,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 2000 rawDataSize 21248 totalSize 23248 @@ -2459,15 +2456,10 @@ columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 4 - numPartitions 2 - numRows 2000 partition_columns ds - rawDataSize 21248 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part @@ -2504,15 +2496,10 @@ columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 4 - numPartitions 2 - numRows 2000 partition_columns ds - rawDataSize 21248 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part @@ -4889,15 +4876,10 @@ columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 4 - numPartitions 2 - numRows 2000 partition_columns ds - rawDataSize 21248 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.merge_src_part @@ -4934,15 +4916,10 @@ columns.types string:string #### A masked pattern was here #### name default.merge_src_part - numFiles 4 - numPartitions 2 - numRows 2000 partition_columns ds - rawDataSize 21248 serialization.ddl struct merge_src_part { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.merge_src_part Index: ql/src/test/results/clientpositive/udtf_explode.q.out =================================================================== --- ql/src/test/results/clientpositive/udtf_explode.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/udtf_explode.q.out (working copy) @@ -68,7 +68,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -87,7 +86,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -156,7 +154,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -175,7 +172,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -422,7 +418,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -441,7 +436,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out 
=================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sort_merge_join_desc_7.q.out (working copy) @@ -231,15 +231,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -278,15 +273,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/binary_output_format.q.out =================================================================== --- ql/src/test/results/clientpositive/binary_output_format.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/binary_output_format.q.out (working copy) @@ -129,7 +129,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -148,7 +147,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: 
ql/src/test/results/clientpositive/bucketmapjoin9.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin9.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin9.q.out (working copy) @@ -167,15 +167,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -388,15 +383,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/bucketmapjoin13.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin13.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin13.q.out (working copy) @@ -195,15 +195,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 
11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -242,15 +237,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -449,15 +439,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -667,15 +652,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -887,15 +867,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 4 - numPartitions 2 - numRows 1000 partition_columns part - rawDataSize 10624 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/stats7.q.out =================================================================== --- ql/src/test/results/clientpositive/stats7.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats7.q.out (working copy) @@ -210,11 +210,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/show_create_table_serde.q.out =================================================================== --- ql/src/test/results/clientpositive/show_create_table_serde.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/show_create_table_serde.q.out (working copy) @@ -37,7 +37,9 @@ LOCATION #### A masked pattern was here #### TBLPROPERTIES ( + 'numFiles'='0', #### A masked pattern was here #### + 'totalSize'='0') PREHOOK: query: DROP TABLE tmp_showcrt1 PREHOOK: type: DROPTABLE PREHOOK: Input: default@tmp_showcrt1 Index: ql/src/test/results/clientpositive/smb_mapjoin_18.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_18.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/smb_mapjoin_18.q.out (working copy) @@ -500,10 +500,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, 
comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select count(*) from test_table2 where ds = '3' PREHOOK: type: QUERY PREHOOK: Input: default@test_table2 @@ -516,10 +516,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE 
[(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 PREHOOK: query: select count(*) from test_table2 where ds = '3' and hash(key) % 2 = 0 PREHOOK: type: QUERY @@ -533,10 +533,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 PREHOOK: query: select count(*) from test_table2 where ds = '3' and hash(key) % 2 = 1 PREHOOK: type: QUERY @@ -550,10 +550,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: 
test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 PREHOOK: query: select count(*) from test_table2 tablesample (bucket 1 out of 2) s where ds = '3' PREHOOK: type: QUERY @@ -567,10 +567,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 PREHOOK: query: select count(*) from test_table2 tablesample (bucket 2 out of 2) s where ds 
= '3' PREHOOK: type: QUERY @@ -584,8 +584,8 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table2)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table2)a.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=2).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 Index: ql/src/test/results/clientpositive/bucketmapjoin4.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin4.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin4.q.out (working copy) @@ -205,7 +205,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -225,7 +224,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -660,7 +658,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - 
numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -692,7 +689,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -712,7 +708,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -750,7 +745,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -785,7 +779,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -814,7 +807,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -833,7 +825,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -866,7 +857,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -895,7 +885,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string 
key, string value1, string value2} @@ -914,7 +903,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/union22.q.out =================================================================== --- ql/src/test/results/clientpositive/union22.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/union22.q.out (working copy) @@ -231,15 +231,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -276,15 +271,10 @@ columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 16936 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 17436 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -326,15 +316,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -389,15 +374,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -456,15 +436,10 @@ columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 16936 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 17436 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta @@ -489,15 +464,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -603,15 +573,10 @@ columns.types string:string:string:string #### A masked pattern was here #### name default.dst_union22 - numFiles 1 - numPartitions 1 - numRows 500 
partition_columns ds - rawDataSize 11124 serialization.ddl struct dst_union22 { string k1, string k2, string k3, string k4} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22 @@ -648,15 +613,10 @@ columns.types string:string:string:string:string:string #### A masked pattern was here #### name default.dst_union22_delta - numFiles 1 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 16936 serialization.ddl struct dst_union22_delta { string k0, string k1, string k2, string k3, string k4, string k5} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 17436 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.dst_union22_delta Index: ql/src/test/results/clientpositive/stats2.q.out =================================================================== --- ql/src/test/results/clientpositive/stats2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats2.q.out (working copy) @@ -221,11 +221,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 4 - numPartitions 4 - numRows 2000 - rawDataSize 21248 - totalSize 23248 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/smb_mapjoin_13.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_13.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/smb_mapjoin_13.q.out (working copy) @@ -143,7 +143,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} @@ -164,7 +163,6 @@ #### A masked pattern was here #### name 
default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} @@ -344,7 +342,6 @@ #### A masked pattern was here #### name default.test_table3 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table3 { i32 key, string value} @@ -365,7 +362,6 @@ #### A masked pattern was here #### name default.test_table3 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table3 { i32 key, string value} Index: ql/src/test/results/clientpositive/unset_table_view_property.q.out =================================================================== --- ql/src/test/results/clientpositive/unset_table_view_property.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/unset_table_view_property.q.out (working copy) @@ -24,11 +24,13 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET all the properties ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'c') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -44,7 +46,9 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'c'='3', 'd'='4') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@testtable @@ -59,11 +63,13 @@ POSTHOOK: type: SHOW_TBLPROPERTIES d 4 +numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET a subset of the properties ALTER TABLE testTable UNSET TBLPROPERTIES ('a', 'd') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -79,9 +85,11 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES 
+numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- the same property being UNSET multiple times ALTER TABLE testTable UNSET TBLPROPERTIES ('c', 'c', 'c') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -97,7 +105,9 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: ALTER TABLE testTable SET TBLPROPERTIES ('a'='1', 'b' = '2', 'c'='3', 'd'='4') PREHOOK: type: ALTERTABLE_PROPERTIES PREHOOK: Input: default@testtable @@ -112,12 +122,14 @@ POSTHOOK: type: SHOW_TBLPROPERTIES d 4 +numFiles 0 #### A masked pattern was here #### b 2 c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'b', 'f') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -133,11 +145,13 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### c 3 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET a subset of the properties and some non-existed properties using IF EXISTS ALTER TABLE testTable UNSET TBLPROPERTIES IF EXISTS ('b', 'd', 'c', 'f', 'x', 'y', 'z') PREHOOK: type: ALTERTABLE_PROPERTIES @@ -153,9 +167,11 @@ POSTHOOK: query: SHOW TBLPROPERTIES testTable POSTHOOK: type: SHOW_TBLPROPERTIES +numFiles 0 #### A masked pattern was here #### a 1 #### A masked pattern was here #### +totalSize 0 PREHOOK: query: -- UNSET VIEW PROPERTIES CREATE VIEW testView AS SELECT value FROM src WHERE key=86 PREHOOK: type: CREATEVIEW Index: ql/src/test/results/clientpositive/join32.q.out =================================================================== --- ql/src/test/results/clientpositive/join32.q.out (revision 1478217) +++ 
ql/src/test/results/clientpositive/join32.q.out (working copy) @@ -148,7 +148,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -167,7 +166,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -190,7 +188,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -209,7 +206,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} Index: ql/src/test/results/clientpositive/ctas_uses_database_location.q.out =================================================================== --- ql/src/test/results/clientpositive/ctas_uses_database_location.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/ctas_uses_database_location.q.out (working copy) @@ -141,7 +141,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 Index: ql/src/test/results/clientpositive/input_part1.q.out =================================================================== --- ql/src/test/results/clientpositive/input_part1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/input_part1.q.out (working copy) @@ -109,15 +109,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.srcpart Index: ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/groupby_sort_skew_1.q.out (working copy) @@ -130,7 +130,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -151,7 +150,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -460,7 +458,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -481,7 +478,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -750,7 +746,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -781,7 +776,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -802,7 +796,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -840,7 +833,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -875,7 +867,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -904,7 +895,6 @@ #### A masked 
pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -923,7 +913,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -956,7 +945,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -985,7 +973,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1004,7 +991,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1143,7 +1129,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1174,7 +1159,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1195,7 +1179,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1233,7 +1216,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1268,7 +1250,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1297,7 +1278,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 
serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1316,7 +1296,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1349,7 +1328,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1378,7 +1356,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1397,7 +1374,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -1590,7 +1566,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1611,7 +1586,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1964,7 +1938,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -1985,7 +1958,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2304,7 +2276,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2325,7 +2296,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2443,7 +2413,6 @@ #### A masked pattern was here #### name default.outputtbl3 
numFiles 1 - numPartitions 0 numRows 5 rawDataSize 25 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} @@ -2474,7 +2443,6 @@ #### A masked pattern was here #### name default.outputtbl3 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 25 serialization.ddl struct outputtbl3 { i32 key1, i32 key2, i32 cnt} @@ -2674,7 +2642,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2695,7 +2662,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -2803,7 +2769,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -2834,7 +2799,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3031,7 +2995,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3093,7 +3056,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3124,7 +3086,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3145,7 +3106,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3183,7 +3143,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 
key, i32 cnt} @@ -3218,7 +3177,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3247,7 +3205,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3266,7 +3223,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3299,7 +3255,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3328,7 +3283,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3347,7 +3301,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 17 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3564,7 +3517,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3585,7 +3537,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3725,7 +3676,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3787,7 +3737,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3834,7 +3783,6 @@ #### A masked pattern was here #### name default.t1 numFiles 
1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3855,7 +3803,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -3894,7 +3841,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3929,7 +3875,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3958,7 +3903,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -3977,7 +3921,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4010,7 +3953,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4039,7 +3981,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4058,7 +3999,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 30 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4320,7 +4260,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4341,7 +4280,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ 
-4384,7 +4322,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 32 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4415,7 +4352,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 10 rawDataSize 32 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -4633,7 +4569,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4654,7 +4589,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4867,7 +4801,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -4888,7 +4821,6 @@ #### A masked pattern was here #### name default.t1 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t1 { string key, string val} @@ -5126,7 +5058,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5147,7 +5078,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5255,7 +5185,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -5286,7 +5215,6 @@ #### A masked pattern was here #### name default.outputtbl1 numFiles 1 - numPartitions 0 numRows 5 rawDataSize 15 serialization.ddl struct outputtbl1 { i32 key, i32 cnt} @@ -5505,7 +5433,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 
serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5536,7 +5463,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5557,7 +5483,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -5595,7 +5520,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5630,7 +5554,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5659,7 +5582,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5678,7 +5600,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5711,7 +5632,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5740,7 +5660,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -5759,7 +5678,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6067,7 +5985,6 @@ #### A masked pattern 
was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6088,7 +6005,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6512,7 +6428,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6543,7 +6458,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6564,7 +6478,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -6602,7 +6515,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6637,7 +6549,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6666,7 +6577,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6685,7 +6595,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6718,7 +6627,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6747,7 +6655,6 
@@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -6766,7 +6673,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7043,7 +6949,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7074,7 +6979,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -7095,7 +6999,6 @@ #### A masked pattern was here #### name default.t2 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 24 serialization.ddl struct t2 { string key, string val} @@ -7133,7 +7036,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7168,7 +7070,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7197,7 +7098,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7216,7 +7116,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7249,7 +7148,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 
serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7278,7 +7176,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} @@ -7297,7 +7194,6 @@ #### A masked pattern was here #### name default.outputtbl4 numFiles 1 - numPartitions 0 numRows 6 rawDataSize 48 serialization.ddl struct outputtbl4 { i32 key1, i32 key2, string key3, i32 cnt} Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_8.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_8.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_8.q.out (working copy) @@ -319,12 +319,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value 
EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -339,12 +339,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), 
(test_table2)b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 0 val_0val_0 1 0 0 val_0val_0 1 0 0 val_0val_0 1 @@ -371,12 +371,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] 
-POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key2 SIMPLE [(test_table2)b.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 5 val_5val_5 1 5 5 val_5val_5 1 5 5 val_5val_5 1 Index: ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_8.q.out (working copy) @@ -170,15 +170,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -218,15 +213,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -370,15 +360,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - 
totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -418,15 +403,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -603,15 +583,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -651,15 +626,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -698,15 +668,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -745,15 +710,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -884,15 +844,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -932,15 +887,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -979,15 +929,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.bucket_small @@ -1026,15 +971,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1142,15 +1082,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1190,15 +1125,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_3.q.out (working copy) @@ -269,10 +269,10 @@ POSTHOOK: Output: default@test_table2@ds=1 POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] 
POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select count(*) from test_table2 where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table2 @@ -285,10 +285,10 @@ #### A masked pattern was here #### POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 
PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 500 PREHOOK: query: select count(*) from test_table2 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY @@ -302,10 +302,10 @@ #### A masked pattern was here #### POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 500 PREHOOK: query: select count(*) from test_table2 tablesample (bucket 2 out of 2) s where ds = '1' PREHOOK: type: QUERY @@ -319,8 +319,8 @@ #### A masked pattern was here #### POSTHOOK: Lineage: test_table1 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: 
Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(test_table1)a.FieldSchema(name:value, type:string, comment:null), ] 0 Index: ql/src/test/results/clientpositive/columnstats_partlvl.q.out =================================================================== --- ql/src/test/results/clientpositive/columnstats_partlvl.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/columnstats_partlvl.q.out (working copy) @@ -164,15 +164,10 @@ field.delim | #### A masked pattern was here #### name default.employee_part - numFiles 2 - numPartitions 2 - numRows 0 partition_columns employeesalary - rawDataSize 0 serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 210 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.employee_part @@ -370,15 +365,10 @@ field.delim | #### A masked pattern was here #### name default.employee_part - numFiles 2 - numPartitions 2 - numRows 0 partition_columns employeesalary - rawDataSize 0 serialization.ddl struct employee_part { i32 employeeid, string employeename} serialization.format | serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 210 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.employee_part Index: ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out =================================================================== --- 
ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_3.q.out (working copy) @@ -145,15 +145,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -294,15 +289,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -474,15 +464,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -521,15 +506,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern 
was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -568,15 +548,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -706,15 +681,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -753,15 +723,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -800,15 +765,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 228 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -916,15 +876,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative3.q.out (working copy) @@ -217,7 +217,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -238,7 +237,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -366,7 +364,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -387,7 +384,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -509,7 +505,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -530,7 +525,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ 
-650,7 +644,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -671,7 +664,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -791,7 +783,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -812,7 +803,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -932,7 +922,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -953,7 +942,6 @@ #### A masked pattern was here #### name default.test1 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test1 { string key, string value} @@ -1073,7 +1061,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -1094,7 +1081,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -1214,7 +1200,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -1235,7 +1220,6 @@ #### A masked pattern was here #### name default.test2 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test2 { string key, string value} @@ -1355,7 +1339,6 @@ #### A masked pattern was here #### name default.test3 numFiles 3 - numPartitions 0 numRows 0 
rawDataSize 0 serialization.ddl struct test3 { string key, string value} @@ -1376,7 +1359,6 @@ #### A masked pattern was here #### name default.test3 numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct test3 { string key, string value} Index: ql/src/test/results/clientpositive/sample8.q.out =================================================================== --- ql/src/test/results/clientpositive/sample8.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample8.q.out (working copy) @@ -103,15 +103,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -149,15 +144,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -195,15 +185,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -241,15 +226,10 @@ columns.types 
string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/transform_ppr2.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/transform_ppr2.q.out (working copy) @@ -110,15 +110,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -156,15 +151,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/union_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/union_ppr.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/union_ppr.q.out (working copy) @@ -170,15 +170,10 @@ columns.types string:string 
#### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -216,15 +211,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/alter_table_not_sorted.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_table_not_sorted.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/alter_table_not_sorted.q.out (working copy) @@ -60,6 +60,9 @@ Table Parameters: SORTBUCKETCOLSPREFIX TRUE #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table.q.out (working copy) @@ -190,12 +190,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A 
masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -249,12 +243,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -347,12 +335,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -450,12 +432,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -510,12 +486,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -570,12 +540,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -630,12 +594,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: 
ql/src/test/results/clientpositive/ctas_hadoop20.q.out =================================================================== --- ql/src/test/results/clientpositive/ctas_hadoop20.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/ctas_hadoop20.q.out (working copy) @@ -158,7 +158,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 96 totalSize 106 @@ -317,7 +316,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 96 totalSize 106 @@ -477,7 +475,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 120 totalSize 199 @@ -544,7 +541,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 120 totalSize 199 @@ -704,7 +700,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 96 totalSize 106 @@ -779,7 +774,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -798,7 +792,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucketcontext_6.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_6.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketcontext_6.q.out (working copy) @@ -174,15 +174,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### 
serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -222,15 +217,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -370,15 +360,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -418,15 +403,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/stats12.q.out =================================================================== --- ql/src/test/results/clientpositive/stats12.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats12.q.out (working copy) @@ -78,10 +78,12 @@ columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart + numFiles 1 partition_columns ds/hr serialization.ddl struct analyze_srcpart { string key, 
string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -115,10 +117,12 @@ columns.types string:string #### A masked pattern was here #### name default.analyze_srcpart + numFiles 1 partition_columns ds/hr serialization.ddl struct analyze_srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -202,11 +206,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information @@ -344,6 +343,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -387,6 +388,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/router_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/router_join_ppr.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/router_join_ppr.q.out (working copy) @@ -91,7 +91,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -110,7 +109,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -154,15 +152,10 @@ columns.types string:string #### A masked pattern was here #### name 
default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -200,15 +193,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -246,15 +234,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -292,15 +275,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -497,7 +475,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} 
@@ -516,7 +493,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -560,15 +536,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -606,15 +577,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -800,7 +766,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -819,7 +784,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -863,15 +827,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -909,15 +868,10 
@@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1105,7 +1059,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1124,7 +1077,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1168,15 +1120,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1214,15 +1161,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1260,15 +1202,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl 
struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1306,15 +1243,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketcontext_1.q.out (working copy) @@ -187,15 +187,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -235,15 +230,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.bucket_big @@ -385,15 +375,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -433,15 +418,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucket1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucket1.q.out (working copy) @@ -61,7 +61,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -80,7 +79,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/input42.q.out =================================================================== --- ql/src/test/results/clientpositive/input42.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/input42.q.out (working copy) @@ -84,15 +84,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - 
numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -130,15 +125,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1274,15 +1264,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1320,15 +1305,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1842,15 +1822,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl 
struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1888,15 +1863,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/stats9.q.out =================================================================== --- ql/src/test/results/clientpositive/stats9.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats9.q.out (working copy) @@ -68,7 +68,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 1000 rawDataSize 10603 totalSize 11603 Index: ql/src/test/results/clientpositive/insert_into5.q.out =================================================================== --- ql/src/test/results/clientpositive/insert_into5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/insert_into5.q.out (working copy) @@ -487,10 +487,10 @@ POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5a.value SIMPLE [] POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE 
[(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: SELECT SUM(HASH(c)) FROM ( SELECT TRANSFORM(*) USING 'tr \t _' AS (c) FROM insert_into5b ) t @@ -509,10 +509,10 @@ POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5a.value SIMPLE [] POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] -37252105840 PREHOOK: query: DROP TABLE 
insert_into5a PREHOOK: type: DROPTABLE @@ -526,7 +526,7 @@ POSTHOOK: Lineage: insert_into5a.key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5a.value SIMPLE [] POSTHOOK: Lineage: insert_into5a.value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5b)insert_into5b.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).key SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: insert_into5b PARTITION(ds=1).value SIMPLE [(insert_into5a)insert_into5a.FieldSchema(name:value, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/bucketmapjoin10.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin10.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin10.q.out (working copy) @@ -215,15 +215,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 5 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6950 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.srcbucket_mapjoin_part_1 @@ -262,15 +257,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 5 - numPartitions 2 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 6950 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/union24.q.out =================================================================== --- ql/src/test/results/clientpositive/union24.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/union24.q.out (working copy) @@ -113,7 +113,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -132,7 +131,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -348,7 +346,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -367,7 +364,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -390,7 +386,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -409,7 +404,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -432,7 +426,6 @@ #### A masked pattern was here #### name 
default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -451,7 +444,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -638,7 +630,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -657,7 +648,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -680,7 +670,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -699,7 +688,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -878,7 +866,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -897,7 +884,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -920,7 +906,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -939,7 +924,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -1113,7 +1097,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} 
@@ -1132,7 +1115,6 @@ #### A masked pattern was here #### name default.src4 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src4 { string key, i64 count} @@ -1155,7 +1137,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -1174,7 +1155,6 @@ #### A masked pattern was here #### name default.src5 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src5 { string key, i64 count} @@ -1431,7 +1411,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -1450,7 +1429,6 @@ #### A masked pattern was here #### name default.src2 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src2 { string key, i64 count} @@ -1473,7 +1451,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} @@ -1492,7 +1469,6 @@ #### A masked pattern was here #### name default.src3 numFiles 1 - numPartitions 0 numRows 309 rawDataSize 1482 serialization.ddl struct src3 { string key, i64 count} Index: ql/src/test/results/clientpositive/stats4.q.out =================================================================== --- ql/src/test/results/clientpositive/stats4.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats4.q.out (working copy) @@ -2540,11 +2540,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information @@ -2588,11 +2583,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - 
totalSize 11624 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/columnstats_tbllvl.q.out =================================================================== --- ql/src/test/results/clientpositive/columnstats_tbllvl.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/columnstats_tbllvl.q.out (working copy) @@ -178,7 +178,6 @@ #### A masked pattern was here #### name default.uservisits_web_text_none numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} @@ -198,7 +197,6 @@ #### A masked pattern was here #### name default.uservisits_web_text_none numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct uservisits_web_text_none { string sourceip, string desturl, string visitdate, float adrevenue, string useragent, string ccode, string lcode, string skeyword, i32 avgtimeonsite} Index: ql/src/test/results/clientpositive/smb_mapjoin_15.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_15.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/smb_mapjoin_15.q.out (working copy) @@ -117,7 +117,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} @@ -138,7 +137,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} @@ -366,7 +364,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -387,7 +384,6 @@ #### A 
masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -555,7 +551,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -576,7 +571,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -776,7 +770,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} @@ -797,7 +790,6 @@ #### A masked pattern was here #### name default.test_table1 numFiles 16 - numPartitions 0 numRows 500 rawDataSize 7218 serialization.ddl struct test_table1 { i32 key, i32 key2, string value} Index: ql/src/test/results/clientpositive/join34.q.out =================================================================== --- ql/src/test/results/clientpositive/join34.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join34.q.out (working copy) @@ -203,7 +203,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -222,7 +221,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -245,7 +243,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -264,7 +261,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct 
src1 { string key, string value} Index: ql/src/test/results/clientpositive/bucketmapjoin1.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin1.q.out (working copy) @@ -445,7 +445,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -465,7 +464,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -908,7 +906,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -961,15 +958,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -1001,7 +993,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1036,7 +1027,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1065,7 
+1055,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1084,7 +1073,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1117,7 +1105,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1146,7 +1133,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1165,7 +1151,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/sample10.q.out =================================================================== --- ql/src/test/results/clientpositive/sample10.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample10.q.out (working copy) @@ -134,15 +134,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 16 - numPartitions 4 - numRows 40 partition_columns ds/hr - rawDataSize 240 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 1228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket 
@@ -182,15 +177,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 16 - numPartitions 4 - numRows 40 partition_columns ds/hr - rawDataSize 240 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 1228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket @@ -230,15 +220,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 16 - numPartitions 4 - numRows 40 partition_columns ds/hr - rawDataSize 240 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 1228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket @@ -278,15 +263,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpartbucket - numFiles 16 - numPartitions 4 - numRows 40 partition_columns ds/hr - rawDataSize 240 serialization.ddl struct srcpartbucket { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe - totalSize 1228 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe name: default.srcpartbucket Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_5.q.out (working copy) @@ -692,10 +692,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: 
test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -710,10 +710,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION 
[(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 8 val_8val_8 1 4 val_4val_4 1 2 val_2val_2 1 @@ -740,10 +740,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key 
SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 9 val_9val_9 1 5 val_5val_5 1 5 val_5val_5 1 Index: ql/src/test/results/clientpositive/stats_noscan_2.q.out =================================================================== --- ql/src/test/results/clientpositive/stats_noscan_2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats_noscan_2.q.out (working copy) @@ -47,7 +47,6 @@ Table Parameters: EXTERNAL TRUE numFiles 0 - numPartitions 0 numRows 6 rawDataSize 6 totalSize 0 @@ -89,7 +88,6 @@ Table Parameters: EXTERNAL TRUE numFiles 0 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 0 Index: ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_5.q.out (working copy) @@ -109,7 +109,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -130,7 +129,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -250,7 +248,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -271,7 +268,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -422,7 +418,6 @@ #### A masked pattern was here #### name 
default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -443,7 +438,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -481,7 +475,6 @@ #### A masked pattern was here #### name default.bucket_small numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} @@ -596,7 +589,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -617,7 +609,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -655,7 +646,6 @@ #### A masked pattern was here #### name default.bucket_small numFiles 4 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} @@ -747,7 +737,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -768,7 +757,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} Index: ql/src/test/results/clientpositive/louter_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/louter_join_ppr.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/louter_join_ppr.q.out (working copy) @@ -89,7 +89,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ 
-108,7 +107,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -152,15 +150,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -198,15 +191,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -394,7 +382,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -413,7 +400,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -457,15 +443,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -503,15 +484,10 @@ 
columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -549,15 +525,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -595,15 +566,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -802,7 +768,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -821,7 +786,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -865,15 +829,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { 
string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -911,15 +870,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -957,15 +911,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1003,15 +952,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1203,7 +1147,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1222,7 +1165,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 
serialization.ddl struct src { string key, string value} @@ -1266,15 +1208,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -1312,15 +1249,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/udf_java_method.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_java_method.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/udf_java_method.q.out (working copy) @@ -101,7 +101,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -120,7 +119,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/sample5.q.out =================================================================== --- ql/src/test/results/clientpositive/sample5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample5.q.out (working copy) @@ -87,7 +87,6 @@ #### A masked pattern was here #### name 
default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -107,7 +106,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/bucketcontext_8.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_8.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketcontext_8.q.out (working copy) @@ -200,15 +200,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -248,15 +243,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -400,15 +390,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -448,15 +433,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/udf_explode.q.out =================================================================== --- ql/src/test/results/clientpositive/udf_explode.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/udf_explode.q.out (working copy) @@ -68,7 +68,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -87,7 +86,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -156,7 +154,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -175,7 +172,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -392,7 +388,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -411,7 +406,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -482,7 +476,6 @@ #### A masked pattern was here #### name 
default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -501,7 +494,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/alter_numbuckets_partitioned_table2.q.out (working copy) @@ -130,12 +130,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -242,12 +236,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -362,12 +350,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -490,12 +472,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -626,12 +602,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - 
numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -770,12 +740,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -922,12 +886,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -1082,12 +1040,6 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### - numFiles 1 - numPartitions 1 - numRows 500 - rawDataSize 5312 - totalSize 5812 -#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/stats14.q.out =================================================================== --- ql/src/test/results/clientpositive/stats14.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats14.q.out (working copy) @@ -43,7 +43,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 @@ -175,11 +174,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information @@ -345,11 +339,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 3 - numPartitions 3 - numRows 1500 - rawDataSize 15936 - totalSize 17436 #### A masked pattern was here #### # Storage Information 
Index: ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_12.q.out (working copy) @@ -275,15 +275,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -323,15 +318,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 11624 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -371,15 +361,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_medium - numFiles 3 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_medium { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 170 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_medium @@ -419,15 +404,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { 
string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 114 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small Index: ql/src/test/results/clientpositive/rand_partitionpruner2.q.out =================================================================== --- ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/rand_partitionpruner2.q.out (working copy) @@ -113,15 +113,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -159,15 +154,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/bucketcontext_3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketcontext_3.q.out (working copy) @@ -175,15 +175,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 
serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -324,15 +319,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/bucket3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucket3.q.out (working copy) @@ -61,7 +61,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -80,7 +79,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out =================================================================== --- ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/groupby_ppr_multi_distinct.q.out (working copy) @@ -90,15 +90,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 
serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -136,15 +131,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out =================================================================== --- ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sort_merge_join_desc_6.q.out (working copy) @@ -176,15 +176,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 500 partition_columns part - rawDataSize 5312 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/bucketmapjoin8.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin8.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin8.q.out (working copy) @@ -169,15 +169,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 
0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -369,15 +364,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/bucketmapjoin12.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin12.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin12.q.out (working copy) @@ -197,15 +197,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 @@ -381,15 +376,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_1 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns part - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_1 { i32 key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 2750 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_1 Index: ql/src/test/results/clientpositive/stats6.q.out =================================================================== --- ql/src/test/results/clientpositive/stats6.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats6.q.out (working copy) @@ -195,6 +195,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -238,6 +240,8 @@ Protect Mode: None #### A masked pattern was here #### Partition Parameters: + numFiles 1 + totalSize 5812 #### A masked pattern was here #### # Storage Information @@ -281,11 +285,6 @@ #### A masked pattern was here #### Table Type: MANAGED_TABLE Table Parameters: - numFiles 2 - numPartitions 2 - numRows 1000 - rawDataSize 10624 - totalSize 11624 #### A masked pattern was here #### # Storage Information Index: ql/src/test/results/clientpositive/bucketmapjoin3.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin3.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin3.q.out (working copy) @@ -226,15 +226,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part_2 - numFiles 2 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part_2 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 3062 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part_2 @@ -675,7 +670,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 
0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -728,15 +722,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -768,7 +757,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -803,7 +791,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -832,7 +819,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -851,7 +837,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -884,7 +869,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -913,7 +897,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 
numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -932,7 +915,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 564 rawDataSize 10503 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: ql/src/test/results/clientpositive/alter_skewed_table.q.out =================================================================== --- ql/src/test/results/clientpositive/alter_skewed_table.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/alter_skewed_table.q.out (working copy) @@ -58,6 +58,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -139,6 +142,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe @@ -222,6 +228,9 @@ Table Type: MANAGED_TABLE Table Parameters: #### A masked pattern was here #### + numFiles 0 + totalSize 0 +#### A masked pattern was here #### # Storage Information SerDe Library: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe Index: ql/src/test/results/clientpositive/stats1.q.out =================================================================== --- ql/src/test/results/clientpositive/stats1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats1.q.out (working copy) @@ -205,7 +205,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 2 - numPartitions 0 numRows 26 rawDataSize 199 totalSize 225 @@ -255,7 +254,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 3 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 1583 Index: 
ql/src/test/results/clientpositive/smb_mapjoin_12.q.out =================================================================== --- ql/src/test/results/clientpositive/smb_mapjoin_12.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/smb_mapjoin_12.q.out (working copy) @@ -177,15 +177,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table1 - numFiles 16 - numPartitions 1 - numRows 500 partition_columns ds - rawDataSize 5312 serialization.ddl struct test_table1 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table1 @@ -353,15 +348,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table3 - numFiles 16 - numPartitions 1 - numRows 3084 partition_columns ds - rawDataSize 32904 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 35988 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -407,15 +397,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table3 - numFiles 16 - numPartitions 1 - numRows 3084 partition_columns ds - rawDataSize 32904 serialization.ddl struct test_table3 { i32 key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 35988 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 @@ -441,15 +426,10 @@ columns.types int:string #### A masked pattern was here #### name default.test_table3 - numFiles 16 - numPartitions 1 - numRows 3084 partition_columns ds - rawDataSize 32904 serialization.ddl struct test_table3 { i32 key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 35988 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.test_table3 Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_7.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_7.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_7.q.out (working copy) @@ -336,10 +336,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from 
test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -354,10 +354,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0 1 0 val_0val_0 1 0 val_0val_0 1 @@ -381,10 +381,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: 
test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 val_5val_5 1 5 val_5val_5 1 5 val_5val_5 1 @@ -420,10 +420,10 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION 
[(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '1') (< (TOK_TABLE_OR_COL key) 8))))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (and (= (TOK_TABLE_OR_COL ds) '1') (< (TOK_TABLE_OR_COL key) 8))))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test_table3) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL a) value) (. (TOK_TABLE_OR_COL b) value)))) (TOK_WHERE (or (= (. (TOK_TABLE_OR_COL a) key) 0) (= (. 
(TOK_TABLE_OR_COL a) key) 5))))) @@ -526,12 +526,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -546,12 +546,12 @@ POSTHOOK: Lineage: test_table1 
PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0 1 0 val_0val_0 1 0 val_0val_0 1 @@ -575,12 +575,12 @@ POSTHOOK: Lineage: test_table1 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).key EXPRESSION [(src)src.FieldSchema(name:key, 
type:string, comment:default), ] POSTHOOK: Lineage: test_table2 PARTITION(ds=1).value SIMPLE [(src)src.FieldSchema(name:value, type:string, comment:default), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 val_5val_5 1 5 val_5val_5 1 5 val_5val_5 1 Index: ql/src/test/results/clientpositive/join32_lessSize.q.out =================================================================== --- ql/src/test/results/clientpositive/join32_lessSize.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/join32_lessSize.q.out (working copy) @@ -109,7 +109,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl 
struct src { string key, string value} @@ -128,7 +127,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -151,7 +149,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -170,7 +167,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -309,15 +305,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -579,7 +570,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -598,7 +588,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -621,7 +610,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -640,7 +628,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -742,7 +729,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, 
string val2} @@ -789,7 +775,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -808,7 +793,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -831,7 +815,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -850,7 +833,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -879,7 +861,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -980,7 +961,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -1027,7 +1007,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1046,7 +1025,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1069,7 +1047,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1088,7 +1065,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1187,7 +1163,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 
- numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -1234,7 +1209,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1253,7 +1227,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1276,7 +1249,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1295,7 +1267,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1389,7 +1360,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1408,7 +1378,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1431,7 +1400,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1450,7 +1418,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1497,7 +1464,6 @@ #### A masked pattern was here #### name default.dest_j1 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j1 { string key, string value, string val2} @@ -1749,7 +1715,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string 
value} @@ -1768,7 +1733,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -1791,7 +1755,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1810,7 +1773,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -1949,15 +1911,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2236,7 +2193,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -2255,7 +2211,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -2278,7 +2233,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -2297,7 +2251,6 @@ #### A masked pattern was here #### name default.src1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src1 { string key, string value} @@ -2373,7 +2326,6 @@ #### A masked pattern was here #### name default.dest_j2 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j2 { string key, string value, string val2} 
@@ -2441,15 +2393,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -2472,7 +2419,6 @@ #### A masked pattern was here #### name default.dest_j2 numFiles 1 - numPartitions 0 numRows 85 rawDataSize 1600 serialization.ddl struct dest_j2 { string key, string value, string val2} Index: ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_7.q.out (working copy) @@ -170,15 +170,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -218,15 +213,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -370,15 +360,10 @@ columns.types 
string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -418,15 +403,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -601,15 +581,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -649,15 +624,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -696,15 +666,10 @@ columns.types string:string #### A masked pattern was here #### name 
default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -743,15 +708,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -882,15 +842,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -930,15 +885,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -977,15 +927,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 
0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1024,15 +969,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 8 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 452 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -1140,15 +1080,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -1188,15 +1123,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/outer_join_ppr.q.out =================================================================== --- ql/src/test/results/clientpositive/outer_join_ppr.q.out (revision 1478217) 
+++ ql/src/test/results/clientpositive/outer_join_ppr.q.out (working copy) @@ -81,7 +81,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -100,7 +99,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -144,15 +142,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -190,15 +183,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -236,15 +224,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -282,15 +265,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - 
numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -479,7 +457,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -498,7 +475,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -542,15 +518,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -588,15 +559,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -634,15 +600,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -680,15 +641,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/list_bucket_dml_10.q.out =================================================================== --- ql/src/test/results/clientpositive/list_bucket_dml_10.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/list_bucket_dml_10.q.out (working copy) @@ -118,7 +118,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -137,7 +136,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucketsortoptimize_insert_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketsortoptimize_insert_2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketsortoptimize_insert_2.q.out (working copy) @@ -984,10 +984,10 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] 
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -1010,10 +1010,10 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE 
[(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0 1 0 val_0val_0 1 0 val_0val_0 1 @@ -1048,10 +1048,10 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] 
POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 5 val_5val_5 1 5 val_5val_5 1 5 val_5val_5 1 @@ -1094,10 +1094,10 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value 
EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_TABLE_OR_COL value) (TOK_TABLE_OR_COL value)) v1)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_FUNCTION concat (TOK_TABLE_OR_COL value) (TOK_TABLE_OR_COL value)) v2)) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test_table3) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL a) key)) (TOK_SELEXPR (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL a) v1) (. 
(TOK_TABLE_OR_COL b) v2)))))) @@ -1202,12 +1202,12 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample 
(bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -1230,12 +1230,12 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, 
comment:null), ] 0 val_0val_0val_0val_0 1 0 val_0val_0val_0val_0 1 0 val_0val_0val_0val_0 1 @@ -1270,12 +1270,12 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), 
] 5 val_5val_5val_5val_5 1 5 val_5val_5val_5val_5 1 5 val_5val_5val_5val_5 1 @@ -1318,12 +1318,12 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] ABSTRACT 
SYNTAX TREE: (TOK_QUERY (TOK_FROM (TOK_JOIN (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table1))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')))) a) (TOK_SUBQUERY (TOK_QUERY (TOK_FROM (TOK_TABREF (TOK_TABNAME test_table2))) (TOK_INSERT (TOK_DESTINATION (TOK_DIR TOK_TMP_FILE)) (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key)) (TOK_SELEXPR (TOK_TABLE_OR_COL value))) (TOK_WHERE (= (TOK_TABLE_OR_COL ds) '1')))) b) (= (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL b) key)))) (TOK_INSERT (TOK_DESTINATION (TOK_TAB (TOK_TABNAME test_table3) (TOK_PARTSPEC (TOK_PARTVAL ds '1')))) (TOK_SELECT (TOK_SELEXPR (+ (. (TOK_TABLE_OR_COL a) key) (. (TOK_TABLE_OR_COL a) key))) (TOK_SELEXPR (TOK_FUNCTION concat (. (TOK_TABLE_OR_COL a) value) (. (TOK_TABLE_OR_COL b) value)))))) @@ -1624,14 +1624,14 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION 
[(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key EXPRESSION [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] PREHOOK: query: select * from test_table3 tablesample (bucket 1 out of 2) s where ds = '1' PREHOOK: type: QUERY PREHOOK: Input: default@test_table3 @@ -1654,14 +1654,14 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, 
comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key EXPRESSION [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] 0 val_0val_0 1 0 val_0val_0 1 0 val_0val_0 1 @@ -1706,11 +1706,11 @@ POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, 
comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] +POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key EXPRESSION [(test_table1)test_table1.FieldSchema(name:key, type:int, comment:null), ] POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)test_table1.FieldSchema(name:value, type:string, comment:null), (test_table2)test_table2.FieldSchema(name:value, type:string, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).key SIMPLE [(test_table1)a.FieldSchema(name:key, type:int, comment:null), ] -POSTHOOK: Lineage: test_table3 PARTITION(ds=1).value EXPRESSION [(test_table1)a.FieldSchema(name:value, type:string, comment:null), (test_table2)b.FieldSchema(name:value, type:string, comment:null), ] Index: ql/src/test/results/clientpositive/udf_reflect.q.out 
=================================================================== --- ql/src/test/results/clientpositive/udf_reflect.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/udf_reflect.q.out (working copy) @@ -101,7 +101,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -120,7 +119,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketmapjoin_negative2.q.out (working copy) @@ -178,7 +178,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -198,7 +197,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} Index: ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out =================================================================== --- ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/auto_sortmerge_join_2.q.out (working copy) @@ -145,15 +145,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked 
pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -193,15 +188,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -376,15 +366,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -424,15 +409,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -471,15 +451,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 #### A masked pattern was here #### serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -610,15 +585,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -658,15 +628,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big @@ -705,15 +670,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_small - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_small { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 226 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_small @@ -820,15 +780,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: 
default.bucket_big @@ -868,15 +823,10 @@ columns.types string:string #### A masked pattern was here #### name default.bucket_big - numFiles 4 - numPartitions 2 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5500 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.bucket_big Index: ql/src/test/results/clientpositive/sample7.q.out =================================================================== --- ql/src/test/results/clientpositive/sample7.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample7.q.out (working copy) @@ -89,7 +89,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -109,7 +108,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/transform_ppr1.q.out =================================================================== --- ql/src/test/results/clientpositive/transform_ppr1.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/transform_ppr1.q.out (working copy) @@ -108,15 +108,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -154,15 +149,10 @@ columns.types string:string #### A masked pattern was here #### name 
default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -200,15 +190,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart @@ -246,15 +231,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/clientpositive/regexp_extract.q.out =================================================================== --- ql/src/test/results/clientpositive/regexp_extract.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/regexp_extract.q.out (working copy) @@ -88,7 +88,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -107,7 +106,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -346,7 
+344,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -365,7 +362,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/bucket_map_join_2.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket_map_join_2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucket_map_join_2.q.out (working copy) @@ -127,7 +127,6 @@ #### A masked pattern was here #### name default.table1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct table1 { string key, string value} @@ -148,7 +147,6 @@ #### A masked pattern was here #### name default.table1 numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct table1 { string key, string value} Index: ql/src/test/results/clientpositive/sample2.q.out =================================================================== --- ql/src/test/results/clientpositive/sample2.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/sample2.q.out (working copy) @@ -89,7 +89,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} @@ -109,7 +108,6 @@ #### A masked pattern was here #### name default.srcbucket numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket { i32 key, string value} Index: ql/src/test/results/clientpositive/stats16.q.out =================================================================== --- ql/src/test/results/clientpositive/stats16.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats16.q.out (working copy) @@ -76,7 +76,6 @@ Table Type: MANAGED_TABLE Table Parameters: 
numFiles 1 - numPartitions 0 numRows 500 rawDataSize 5312 totalSize 5812 Index: ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out =================================================================== --- ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/disable_merge_for_bucketing.q.out (working copy) @@ -61,7 +61,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -80,7 +79,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} Index: ql/src/test/results/clientpositive/ppd_union_view.q.out =================================================================== --- ql/src/test/results/clientpositive/ppd_union_view.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/ppd_union_view.q.out (working copy) @@ -276,15 +276,10 @@ columns.types string:string #### A masked pattern was here #### name default.t1_mapping - numFiles 2 - numPartitions 2 - numRows 2 partition_columns ds - rawDataSize 24 serialization.ddl struct t1_mapping { string key, string keymap} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 26 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_mapping @@ -321,15 +316,10 @@ columns.types string:string #### A masked pattern was here #### name default.t1_old - numFiles 2 - numPartitions 2 - numRows 2 partition_columns ds - rawDataSize 28 serialization.ddl struct t1_old { string keymap, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 30 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_old 
@@ -781,15 +771,10 @@ columns.types string:string #### A masked pattern was here #### name default.t1_new - numFiles 2 - numPartitions 2 - numRows 2 partition_columns ds - rawDataSize 22 serialization.ddl struct t1_new { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 24 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.t1_new Index: ql/src/test/results/clientpositive/truncate_column.q.out =================================================================== --- ql/src/test/results/clientpositive/truncate_column.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/truncate_column.q.out (working copy) @@ -39,7 +39,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 185 @@ -107,7 +106,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 150 @@ -193,7 +191,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 75 @@ -269,7 +266,6 @@ Table Type: MANAGED_TABLE Table Parameters: numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 75 @@ -362,7 +358,6 @@ Table Parameters: #### A masked pattern was here #### numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 185 @@ -443,7 +438,6 @@ Table Parameters: #### A masked pattern was here #### numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 150 @@ -526,7 +520,6 @@ Table Parameters: #### A masked pattern was here #### numFiles 1 - numPartitions 0 numRows 10 rawDataSize 94 totalSize 75 Index: ql/src/test/results/clientpositive/bucketcontext_5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucketcontext_5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucketcontext_5.q.out (working 
copy) @@ -139,7 +139,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -160,7 +159,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -280,7 +278,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} @@ -301,7 +298,6 @@ #### A masked pattern was here #### name default.bucket_big numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct bucket_big { string key, string value} Index: ql/src/test/results/clientpositive/stats11.q.out =================================================================== --- ql/src/test/results/clientpositive/stats11.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/stats11.q.out (working copy) @@ -392,7 +392,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -412,7 +411,6 @@ #### A masked pattern was here #### name default.srcbucket_mapjoin numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct srcbucket_mapjoin { i32 key, string value} @@ -855,7 +853,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -908,15 +905,10 @@ columns.types int:string #### A masked pattern was here #### name default.srcbucket_mapjoin_part - numFiles 4 - numPartitions 1 - numRows 0 partition_columns ds - rawDataSize 0 serialization.ddl struct srcbucket_mapjoin_part { i32 key, string value} 
serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 5812 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcbucket_mapjoin_part @@ -948,7 +940,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -983,7 +974,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1012,7 +1002,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1031,7 +1020,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1064,7 +1052,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1093,7 +1080,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} @@ -1112,7 +1098,6 @@ #### A masked pattern was here #### name default.bucketmapjoin_tmp_result numFiles 1 - numPartitions 0 numRows 464 rawDataSize 8519 serialization.ddl struct bucketmapjoin_tmp_result { string key, string value1, string value2} Index: 
ql/src/test/results/clientpositive/bucket5.q.out =================================================================== --- ql/src/test/results/clientpositive/bucket5.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/bucket5.q.out (working copy) @@ -107,7 +107,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -126,7 +125,6 @@ #### A masked pattern was here #### name default.src numFiles 1 - numPartitions 0 numRows 0 rawDataSize 0 serialization.ddl struct src { string key, string value} @@ -492,7 +490,6 @@ Table Parameters: SORTBUCKETCOLSPREFIX TRUE numFiles 2 - numPartitions 0 numRows 0 rawDataSize 0 totalSize 5812 Index: ql/src/test/results/clientpositive/input23.q.out =================================================================== --- ql/src/test/results/clientpositive/input23.q.out (revision 1478217) +++ ql/src/test/results/clientpositive/input23.q.out (working copy) @@ -89,15 +89,10 @@ columns.types string:string #### A masked pattern was here #### name default.srcpart - numFiles 4 - numPartitions 4 - numRows 0 partition_columns ds/hr - rawDataSize 0 serialization.ddl struct srcpart { string key, string value} serialization.format 1 serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe - totalSize 23248 #### A masked pattern was here #### serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe name: default.srcpart Index: ql/src/test/results/compiler/plan/join2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join2.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/join2.q.xml (working copy) @@ -279,10 +279,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1086,10 +1082,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1168,10 +1160,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1816,10 +1804,6 @@ 0 - numPartitions - 0 - - 
bucket_count -1 @@ -1964,10 +1948,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2684,10 +2664,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2766,10 +2742,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input2.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input2.q.xml (working copy) @@ -1687,10 +1687,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2826,10 +2822,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2908,10 +2900,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join3.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/join3.q.xml (working copy) @@ -268,10 +268,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -416,10 +412,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -564,10 +556,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1565,10 +1553,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1647,10 +1631,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input3.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input3.q.xml (working copy) @@ -2064,10 +2064,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -3495,10 +3491,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -3577,10 +3569,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join4.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/join4.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - 
bucket_count -1 @@ -263,10 +259,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1572,10 +1564,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1654,10 +1642,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input4.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input4.q.xml (working copy) @@ -268,10 +268,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1086,10 +1082,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1168,10 +1160,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join5.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/join5.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -263,10 +259,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1572,10 +1564,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1654,10 +1642,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input5.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input5.q.xml (working copy) @@ -272,10 +272,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1116,10 +1112,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1202,10 +1194,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex Index: ql/src/test/results/compiler/plan/join6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join6.q.xml (revision 1478217) +++ 
ql/src/test/results/compiler/plan/join6.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -263,10 +259,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1572,10 +1564,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1654,10 +1642,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input_testxpath2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath2.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input_testxpath2.q.xml (working copy) @@ -119,10 +119,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -937,10 +933,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1023,10 +1015,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex Index: ql/src/test/results/compiler/plan/input6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input6.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input6.q.xml (working copy) @@ -640,10 +640,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1153,10 +1149,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1235,10 +1227,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join7.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/join7.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -263,10 +259,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -411,10 +403,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2306,10 +2294,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2388,10 +2372,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: 
ql/src/test/results/compiler/plan/input7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input7.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input7.q.xml (working copy) @@ -640,10 +640,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1062,10 +1058,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1144,10 +1136,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input8.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input8.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -727,10 +723,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -809,10 +801,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join8.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join8.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/join8.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -263,10 +259,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1654,10 +1646,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1736,10 +1724,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input_testsequencefile.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input_testsequencefile.q.xml (working copy) @@ -640,10 +640,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1070,10 +1066,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1152,10 +1144,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input9.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/input9.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input9.q.xml (working copy) @@ -640,10 +640,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1143,10 +1139,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1225,10 +1217,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/union.q.xml =================================================================== --- ql/src/test/results/compiler/plan/union.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/union.q.xml (working copy) @@ -513,10 +513,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -661,10 +657,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1719,10 +1711,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1801,10 +1789,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/udf1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf1.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/udf1.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1982,10 +1978,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -2064,10 +2056,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input_testxpath.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_testxpath.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input_testxpath.q.xml (working copy) @@ -119,10 +119,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -839,10 +835,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -925,10 +917,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex Index: 
ql/src/test/results/compiler/plan/udf6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf6.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/udf6.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -658,10 +654,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -740,10 +732,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input_part1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input_part1.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input_part1.q.xml (working copy) @@ -120,10 +120,6 @@ default.srcpart - numFiles - 4 - - columns.types string:string @@ -140,18 +136,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 4 - - partition_columns ds/hr @@ -168,10 +152,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 23248 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -934,10 +914,6 @@ default.srcpart - numFiles - 4 - - columns.types string:string @@ -954,18 +930,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 4 - - partition_columns ds/hr @@ -982,10 +946,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 23248 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/compiler/plan/groupby1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby1.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/groupby1.q.xml (working copy) @@ -268,10 +268,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1074,10 +1070,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1156,10 +1148,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby2.q.xml 
=================================================================== --- ql/src/test/results/compiler/plan/groupby2.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/groupby2.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1179,10 +1175,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1261,10 +1253,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/udf_case.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf_case.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/udf_case.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -752,10 +748,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -834,10 +826,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby3.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/groupby3.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1378,10 +1374,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1460,10 +1452,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/subq.q.xml =================================================================== --- ql/src/test/results/compiler/plan/subq.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/subq.q.xml (working copy) @@ -513,10 +513,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1037,10 +1033,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1119,10 +1111,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby4.q.xml (revision 1478217) +++ 
ql/src/test/results/compiler/plan/groupby4.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -825,10 +821,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -907,10 +899,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby5.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/groupby5.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -918,10 +914,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1000,10 +992,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/groupby6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/groupby6.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/groupby6.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -825,10 +821,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -907,10 +899,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/case_sensitivity.q.xml =================================================================== --- ql/src/test/results/compiler/plan/case_sensitivity.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/case_sensitivity.q.xml (working copy) @@ -644,10 +644,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1352,10 +1348,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex @@ -1438,10 +1430,6 @@ 0 - numPartitions - 0 - - serialization.class org.apache.hadoop.hive.serde2.thrift.test.Complex Index: ql/src/test/results/compiler/plan/udf_when.q.xml =================================================================== --- ql/src/test/results/compiler/plan/udf_when.q.xml (revision 1478217) 
+++ ql/src/test/results/compiler/plan/udf_when.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -832,10 +828,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -914,10 +906,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input20.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input20.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input20.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -933,10 +929,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1015,10 +1007,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/sample1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample1.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/sample1.q.xml (working copy) @@ -120,10 +120,6 @@ default.srcpart - numFiles - 4 - - columns.types string:string @@ -140,18 +136,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 4 - - partition_columns ds/hr @@ -168,10 +152,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 23248 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat @@ -1042,10 +1022,6 @@ default.srcpart - numFiles - 4 - - columns.types string:string @@ -1062,18 +1038,6 @@ key,value - rawDataSize - 0 - - - numRows - 0 - - - numPartitions - 4 - - partition_columns ds/hr @@ -1090,10 +1054,6 @@ org.apache.hadoop.mapred.TextInputFormat - totalSize - 23248 - - file.outputformat org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat Index: ql/src/test/results/compiler/plan/sample2.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample2.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/sample2.q.xml (working copy) @@ -648,10 +648,6 @@ 0 - 
numPartitions - 0 - - bucket_count 2 @@ -1276,10 +1272,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1362,10 +1354,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/sample3.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample3.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/sample3.q.xml (working copy) @@ -648,10 +648,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1286,10 +1282,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1372,10 +1364,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/sample4.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample4.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/sample4.q.xml (working copy) @@ -648,10 +648,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1276,10 +1272,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1362,10 +1354,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/sample5.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample5.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/sample5.q.xml (working copy) @@ -648,10 +648,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1273,10 +1269,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1359,10 +1351,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/sample6.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample6.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/sample6.q.xml (working copy) @@ -648,10 +648,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1276,10 +1272,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1362,10 +1354,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: 
ql/src/test/results/compiler/plan/sample7.q.xml =================================================================== --- ql/src/test/results/compiler/plan/sample7.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/sample7.q.xml (working copy) @@ -648,10 +648,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1321,10 +1317,6 @@ 0 - numPartitions - 0 - - bucket_count 2 @@ -1407,10 +1399,6 @@ 0 - numPartitions - 0 - - bucket_count 2 Index: ql/src/test/results/compiler/plan/cast1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/cast1.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/cast1.q.xml (working copy) @@ -115,10 +115,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1156,10 +1152,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1238,10 +1230,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/join1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/join1.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/join1.q.xml (working copy) @@ -268,10 +268,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -416,10 +412,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1153,10 +1145,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1235,10 +1223,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/results/compiler/plan/input1.q.xml =================================================================== --- ql/src/test/results/compiler/plan/input1.q.xml (revision 1478217) +++ ql/src/test/results/compiler/plan/input1.q.xml (working copy) @@ -640,10 +640,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1167,10 +1163,6 @@ 0 - numPartitions - 0 - - bucket_count -1 @@ -1249,10 +1241,6 @@ 0 - numPartitions - 0 - - bucket_count -1 Index: ql/src/test/org/apache/hadoop/hive/ql/exec/TestStatsPublisherEnhanced.java 
=================================================================== --- ql/src/test/org/apache/hadoop/hive/ql/exec/TestStatsPublisherEnhanced.java (revision 1478217) +++ ql/src/test/org/apache/hadoop/hive/ql/exec/TestStatsPublisherEnhanced.java (working copy) @@ -24,11 +24,11 @@ import junit.framework.TestCase; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.stats.StatsAggregator; import org.apache.hadoop.hive.ql.stats.StatsFactory; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.mapred.JobConf; /** Index: ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/plan/ConditionalResolverMergeFiles.java (working copy) @@ -28,10 +28,10 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.ql.exec.Task; -import org.apache.hadoop.hive.ql.exec.Utilities; /** * Conditional task resolution interface. 
This is invoked at run time to get the @@ -226,7 +226,7 @@ throws IOException { DynamicPartitionCtx dpCtx = ctx.getDPCtx(); // get list of dynamic partitions - FileStatus[] status = Utilities.getFileStatusRecurse(dirPath, dpLbLevel, inpFs); + FileStatus[] status = HiveStatsUtils.getFileStatusRecurse(dirPath, dpLbLevel, inpFs); // cleanup pathToPartitionInfo Map ptpi = work.getPathToPartitionInfo(); Index: ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (working copy) @@ -48,6 +48,8 @@ import org.apache.hadoop.fs.FsShell; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.HiveStatsUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaException; import org.apache.hadoop.hive.metastore.HiveMetaHook; @@ -141,6 +143,7 @@ String newVar = c.get(oneVar.varname, ""); if (oldVar.compareToIgnoreCase(newVar) != 0) { needsRefresh = true; +// db.getConf().set(oneVar.varname, newVar); break; } } @@ -1353,7 +1356,7 @@ new ArrayList>(); FileSystem fs = loadPath.getFileSystem(conf); - FileStatus[] leafStatus = Utilities.getFileStatusRecurse(loadPath, numDP+1, fs); + FileStatus[] leafStatus = HiveStatsUtils.getFileStatusRecurse(loadPath, numDP+1, fs); // Check for empty partitions for (FileStatus s : leafStatus) { // Check if the hadoop version supports sub-directories for tables/partitions @@ -1546,6 +1549,18 @@ return getPartition(tbl, partSpec, forceCreate, null, true); } + private static void clearPartitionStats(org.apache.hadoop.hive.metastore.api.Partition tpart) { + Map tpartParams = tpart.getParameters(); + if (tpartParams == null) { + return; + } + List statTypes = 
StatsSetupConst.getSupportedStats(); + for (String statType : statTypes) { + tpartParams.remove(statType); + } + tpart.setParameters(tpartParams); + } + /** * Returns partition metadata * @@ -1615,6 +1630,7 @@ throw new HiveException("new partition path should not be null or empty."); } tpart.getSd().setLocation(partPath); + clearPartitionStats(tpart); String fullName = tbl.getTableName(); if (!org.apache.commons.lang.StringUtils.isEmpty(tbl.getDbName())) { fullName = tbl.getDbName() + "." + tbl.getTableName(); @@ -2438,4 +2454,5 @@ private static String[] getQualifiedNames(String qualifiedName) { return qualifiedName.split("\\."); } + }; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (working copy) @@ -34,6 +34,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.HiveStatsUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.metastore.api.SkewedValueList; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils; @@ -49,7 +51,6 @@ import org.apache.hadoop.hive.ql.plan.SkewedColumnPositionPair; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeStats; import org.apache.hadoop.hive.serde2.Serializer; @@ -819,7 +820,7 @@ if (conf.isLinkedFileSink()) { level++; } - FileStatus[] status = Utilities.getFileStatusRecurse(tmpPath, level, fs); + FileStatus[] status = HiveStatsUtils.getFileStatusRecurse(tmpPath, level, fs); 
sb.append("Sample of ") .append(Math.min(status.length, 100)) .append(" partitions created under ") Index: ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/TableScanOperator.java (working copy) @@ -27,6 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.FileUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; @@ -34,7 +35,6 @@ import org.apache.hadoop.hive.ql.plan.TableScanDesc; import org.apache.hadoop.hive.ql.plan.api.OperatorType; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption; Index: ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (working copy) @@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.ql.Context; @@ -48,12 +49,16 @@ import org.apache.hadoop.hive.ql.stats.StatsAggregator; import 
org.apache.hadoop.hive.ql.stats.StatsFactory; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.util.StringUtils; /** - * StatsTask implementation. + * StatsTask implementation. StatsTask mainly deals with "collectable" stats. These are + * stats that require data scanning and are collected during query execution (unless the user + * explicitly requests data scanning just for the purpose of stats computation using the "ANALYZE" + * command. All other stats are computed directly by the MetaStore. The rationale being that the + * MetaStore layer covers all Thrift calls and provides better guarantees about the accuracy of + * those stats. **/ public class StatsTask extends Task implements Serializable { @@ -63,24 +68,16 @@ private Table table; private List> dpPartSpecs; - private static final List supportedStats = new ArrayList(); - private static final List collectableStats = new ArrayList(); - private static final Map nameMapping = new HashMap(); - static { - // supported statistics - supportedStats.add(StatsSetupConst.NUM_FILES); - supportedStats.add(StatsSetupConst.ROW_COUNT); - supportedStats.add(StatsSetupConst.TOTAL_SIZE); - supportedStats.add(StatsSetupConst.RAW_DATA_SIZE); + private static final List collectableStats; + private static final List supportedStats; + private static final List fastStats; + private static final Map nameMapping; - // statistics that need to be collected throughout the execution - collectableStats.add(StatsSetupConst.ROW_COUNT); - collectableStats.add(StatsSetupConst.RAW_DATA_SIZE); - - nameMapping.put(StatsSetupConst.NUM_FILES, "num_files"); - nameMapping.put(StatsSetupConst.ROW_COUNT, "num_rows"); - nameMapping.put(StatsSetupConst.TOTAL_SIZE, "total_size"); - nameMapping.put(StatsSetupConst.RAW_DATA_SIZE, "raw_data_size"); + static { + collectableStats = StatsSetupConst.getCollectableStats(); + 
supportedStats = StatsSetupConst.getSupportedStats(); + fastStats = StatsSetupConst.getFastStats(); + nameMapping = StatsSetupConst.getNameMapping(); } public StatsTask() { @@ -90,20 +87,20 @@ /** * - * Partition Level Statistics. + * Statistics for a Partition or Unpartitioned Table * */ - class PartitionStatistics { + class Statistics { Map stats; - public PartitionStatistics() { + public Statistics() { stats = new HashMap(); for (String statType : supportedStats) { stats.put(statType, new LongWritable(0L)); } } - public PartitionStatistics(Map st) { + public Statistics(Map st) { stats = new HashMap(); for (String statType : st.keySet()) { Long stValue = st.get(statType) == null ? 0L : st.get(statType); @@ -131,80 +128,6 @@ } } - /** - * Table Level Statistics. - */ - class TableStatistics extends PartitionStatistics { - int numPartitions; // number of partitions - - public TableStatistics() { - super(); - numPartitions = 0; - } - - public void setNumPartitions(int np) { - numPartitions = np; - } - - public int getNumPartitions() { - return numPartitions; - } - - /** - * Incrementally update the table statistics according to the old and new - * partition level statistics. - * - * @param oldStats - * The old statistics of a partition. - * @param newStats - * The new statistics of a partition. - */ - public void updateStats(PartitionStatistics oldStats, PartitionStatistics newStats) { - deletePartitionStats(oldStats); - addPartitionStats(newStats); - } - - /** - * Update the table level statistics when a new partition is added. - * - * @param newStats - * the new partition statistics. 
- */ - public void addPartitionStats(PartitionStatistics newStats) { - for (String statType : supportedStats) { - LongWritable value = stats.get(statType); - if (value == null) { - stats.put(statType, new LongWritable(newStats.getStat(statType))); - } else { - value.set(value.get() + newStats.getStat(statType)); - } - } - this.numPartitions++; - } - - /** - * Update the table level statistics when an old partition is dropped. - * - * @param oldStats - * the old partition statistics. - */ - public void deletePartitionStats(PartitionStatistics oldStats) { - for (String statType : supportedStats) { - LongWritable value = stats.get(statType); - value.set(value.get() - oldStats.getStat(statType)); - } - this.numPartitions--; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("num_partitions: ").append(numPartitions).append(", "); - sb.append(super.toString()); - return sb.toString(); - } - } - @Override protected void receiveFeed(FeedType feedType, Object feedValue) { // this method should be called by MoveTask when there are dynamic partitions generated @@ -300,7 +223,7 @@ } } - TableStatistics tblStats = new TableStatistics(); + Statistics tblStats = new Statistics(); org.apache.hadoop.hive.metastore.api.Table tTable = table.getTTable(); Map parameters = tTable.getParameters(); @@ -313,10 +236,6 @@ } } - if (parameters.containsKey(StatsSetupConst.NUM_PARTITIONS)) { - tblStats.setNumPartitions(Integer.parseInt(parameters.get(StatsSetupConst.NUM_PARTITIONS))); - } - List partitions = getPartitionsList(); boolean atomic = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_ATOMIC); int maxPrefixLength = HiveConf.getIntVar(conf, @@ -327,17 +246,7 @@ if (!tableStatsExist && atomic) { return 0; } - Path tablePath = wh.getTablePath(db.getDatabase(table.getDbName()), table.getTableName()); - fileSys = tablePath.getFileSystem(conf); - fileStatus = Utilities.getFileStatusRecurse(tablePath, 1, fileSys); - 
tblStats.setStat(StatsSetupConst.NUM_FILES, fileStatus.length); - long tableSize = 0L; - for (int i = 0; i < fileStatus.length; i++) { - tableSize += fileStatus[i].getLen(); - } - tblStats.setStat(StatsSetupConst.TOTAL_SIZE, tableSize); - // In case of a non-partitioned table, the key for stats temporary store is "rootDir" if (statsAggregator != null) { String aggKey = Utilities.getHashedStatsPrefix(work.getAggKey(), maxPrefixLength); @@ -354,6 +263,19 @@ } } } + + // write table stats to metastore + parameters = tTable.getParameters(); + for (String statType : collectableStats) { + parameters.put(statType, Long.toString(tblStats.getStat(statType))); + } + tTable.setParameters(parameters); + + String tableFullName = table.getDbName() + "." + table.getTableName(); + + db.alterTable(tableFullName, new Table(tTable)); + + console.printInfo("Table " + tableFullName + " stats: [" + tblStats.toString() + ']'); } else { // Partitioned table: // Need to get the old stats of the partition @@ -380,7 +302,7 @@ // // get the new partition stats // - PartitionStatistics newPartStats = new PartitionStatistics(); + Statistics newPartStats = new Statistics(); // In that case of a partition, the key for stats temporary store is // "rootDir/[dynamic_partition_specs/]%" @@ -408,26 +330,17 @@ } } - fileSys = partn.getPartitionPath().getFileSystem(conf); - /* consider sub-directory created from list bucketing. 
*/ - int listBucketingDepth = calculateListBucketingDMLDepth(partn); - fileStatus = Utilities.getFileStatusRecurse(partn.getPartitionPath(), - (1 + listBucketingDepth), fileSys); - newPartStats.setStat(StatsSetupConst.NUM_FILES, fileStatus.length); - - long partitionSize = 0L; - for (int i = 0; i < fileStatus.length; i++) { - partitionSize += fileStatus[i].getLen(); + /** + * calculate fast statistics + */ + FileStatus[] partfileStatus = wh.getFileStatusesForPartition(tPart); + newPartStats.setStat(StatsSetupConst.NUM_FILES, partfileStatus.length); + long partSize = 0L; + for (int i = 0; i < partfileStatus.length; i++) { + partSize += partfileStatus[i].getLen(); } - newPartStats.setStat(StatsSetupConst.TOTAL_SIZE, partitionSize); + newPartStats.setStat(StatsSetupConst.TOTAL_SIZE, partSize); - if (hasStats) { - PartitionStatistics oldPartStats = new PartitionStatistics(currentValues); - tblStats.updateStats(oldPartStats, newPartStats); - } else { - tblStats.addPartitionStats(newPartStats); - } - // // update the metastore // @@ -448,22 +361,6 @@ } - // - // write table stats to metastore - // - parameters = tTable.getParameters(); - for (String statType : supportedStats) { - parameters.put(statType, Long.toString(tblStats.getStat(statType))); - } - parameters.put(StatsSetupConst.NUM_PARTITIONS, Integer.toString(tblStats.getNumPartitions())); - tTable.setParameters(parameters); - - String tableFullName = table.getDbName() + "." + table.getTableName(); - - db.alterTable(tableFullName, new Table(tTable)); - - console.printInfo("Table " + tableFullName + " stats: [" + tblStats.toString() + ']'); - } catch (Exception e) { console.printInfo("[Warning] could not update stats.", "Failed with exception " + e.getMessage() + "\n" @@ -483,28 +380,6 @@ return ret; } - /** - * List bucketing will introduce sub-directories. - * - * calculate it here in order to go to the leaf directory - * - * so that we can count right number of files. 
- * - * @param partn - * @return - */ - private int calculateListBucketingDMLDepth(Partition partn) { - // list bucketing will introduce more files - int listBucketingDepth = 0; - if ((partn.getSkewedColNames() != null) && (partn.getSkewedColNames().size() > 0) - && (partn.getSkewedColValues() != null) && (partn.getSkewedColValues().size() > 0) - && (partn.getSkewedColValueLocationMaps() != null) - && (partn.getSkewedColValueLocationMaps().size() > 0)) { - listBucketingDepth = partn.getSkewedColNames().size(); - } - return listBucketingDepth; - } - private boolean existStats(Map parameters) { return parameters.containsKey(StatsSetupConst.ROW_COUNT) || parameters.containsKey(StatsSetupConst.NUM_FILES) @@ -513,7 +388,7 @@ || parameters.containsKey(StatsSetupConst.NUM_PARTITIONS); } - private void updateStats(List statsList, PartitionStatistics stats, + private void updateStats(List statsList, Statistics stats, StatsAggregator statsAggregator, Map parameters, String aggKey, boolean atomic) throws HiveException { Index: ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java (working copy) @@ -89,6 +89,7 @@ import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hive.common.HiveInterruptCallback; import org.apache.hadoop.hive.common.HiveInterruptUtils; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.FieldSchema; @@ -1312,30 +1313,6 @@ return snew.toString(); } - /** - * Get all file status from a root path and recursively go deep into certain levels. 
- * - * @param path - * the root path - * @param level - * the depth of directory should explore - * @param fs - * the file system - * @return array of FileStatus - * @throws IOException - */ - public static FileStatus[] getFileStatusRecurse(Path path, int level, FileSystem fs) - throws IOException { - - // construct a path pattern (e.g., /*/*) to find all dynamically generated paths - StringBuilder sb = new StringBuilder(path.toUri().getPath()); - for (int i = 0; i < level; ++i) { - sb.append(Path.SEPARATOR).append("*"); - } - Path pathPattern = new Path(path, sb.toString()); - return fs.globStatus(pathPattern); - } - public static void mvFileToFinalPath(String specPath, Configuration hconf, boolean success, Log log, DynamicPartitionCtx dpCtx, FileSinkDesc conf, Reporter reporter) throws IOException, @@ -1441,7 +1418,7 @@ ArrayList result = new ArrayList(); if (dpCtx != null) { - FileStatus parts[] = getFileStatusRecurse(path, dpCtx.getNumDPCols(), fs); + FileStatus parts[] = HiveStatsUtils.getFileStatusRecurse(path, dpCtx.getNumDPCols(), fs); HashMap taskIDToFile = null; for (int i = 0; i < parts.length; ++i) { @@ -1940,7 +1917,7 @@ Path loadPath = new Path(dpCtx.getRootPath()); FileSystem fs = loadPath.getFileSystem(conf); int numDPCols = dpCtx.getNumDPCols(); - FileStatus[] status = Utilities.getFileStatusRecurse(loadPath, numDPCols, fs); + FileStatus[] status = HiveStatsUtils.getFileStatusRecurse(loadPath, numDPCols, fs); if (status.length == 0) { LOG.warn("No partition is generated by dynamic partitioning"); Index: ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/exec/MoveTask.java (working copy) @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocalFileSystem; import org.apache.hadoop.fs.Path; +import 
org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreUtils; import org.apache.hadoop.hive.metastore.api.InvalidOperationException; @@ -191,11 +192,11 @@ } } - @Override public int execute(DriverContext driverContext) { try { + // Do any hive related operations like moving tables and files // to appropriate locations LoadFileDesc lfd = work.getLoadFileWork(); @@ -453,7 +454,7 @@ boolean updateBucketCols = false; if (bucketCols != null) { FileSystem fileSys = partn.getPartitionPath().getFileSystem(conf); - FileStatus[] fileStatus = Utilities.getFileStatusRecurse( + FileStatus[] fileStatus = HiveStatsUtils.getFileStatusRecurse( partn.getPartitionPath(), 1, fileSys); // Verify the number of buckets equals the number of files // This will not hold for dynamic partitions where not every reducer produced a file for Index: ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsUtils.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsUtils.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/stats/jdbc/JDBCStatsUtils.java (working copy) @@ -23,7 +23,7 @@ import java.util.List; import java.util.Map; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; +import org.apache.hadoop.hive.common.StatsSetupConst; public class JDBCStatsUtils { Index: ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsFactory.java (working copy) @@ -24,6 +24,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.JavaUtils; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; 
import org.apache.hadoop.util.ReflectionUtils; Index: ql/src/java/org/apache/hadoop/hive/ql/stats/StatsSetupConst.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/stats/StatsSetupConst.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/stats/StatsSetupConst.java (working copy) @@ -1,64 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.hive.ql.stats; - -/** - * A class that defines the constant strings used by the statistics implementation. - */ - -public class StatsSetupConst { - - /** - * The value of the user variable "hive.stats.dbclass" to use HBase implementation. - */ - public static final String HBASE_IMPL_CLASS_VAL = "hbase"; - - /** - * The value of the user variable "hive.stats.dbclass" to use JDBC implementation. - */ - public static final String JDBC_IMPL_CLASS_VAL = "jdbc"; - - /** - * The name of the statistic Num Files to be published or gathered. - */ - public static final String NUM_FILES = "numFiles"; - - /** - * The name of the statistic Num Partitions to be published or gathered. 
- */ - public static final String NUM_PARTITIONS = "numPartitions"; - - /** - * The name of the statistic Total Size to be published or gathered. - */ - public static final String TOTAL_SIZE = "totalSize"; - - - // statistics stored in metastore - - /** - * The name of the statistic Row Count to be published or gathered. - */ - public static final String ROW_COUNT = "numRows"; - - /** - * The name of the statistic Raw Data Size to be published or gathered. - */ - public static final String RAW_DATA_SIZE = "rawDataSize"; - -} Index: ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java (revision 1478217) +++ ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/MergeWork.java (working copy) @@ -27,8 +27,8 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hive.common.HiveStatsUtils; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.io.CombineHiveInputFormat; import org.apache.hadoop.hive.ql.plan.DynamicPartitionCtx; import org.apache.hadoop.hive.ql.plan.Explain; @@ -155,7 +155,7 @@ Path dirPath = new Path(dirName); try { FileSystem inpFs = dirPath.getFileSystem(conf); - FileStatus[] status = Utilities.getFileStatusRecurse(dirPath, listBucketingCtx + FileStatus[] status = HiveStatsUtils.getFileStatusRecurse(dirPath, listBucketingCtx .getSkewedColNames().size(), inpFs); List newInputPath = new ArrayList(); boolean succeed = true; Index: ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java =================================================================== --- ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java (revision 1478217) +++ 
ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/stats/PartialScanMapper.java (working copy) @@ -24,6 +24,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.ErrorMsg; @@ -33,7 +34,6 @@ import org.apache.hadoop.hive.ql.io.rcfile.merge.RCFileValueBufferWrapper; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.stats.StatsPublisher; -import org.apache.hadoop.hive.ql.stats.StatsSetupConst; import org.apache.hadoop.hive.shims.CombineHiveKey; import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.MapReduceBase;